%*********************************************************************************
%       Manuscript       Vertex-transitive direct products       March 2017
%
%	  Revised April 2018
%
%*********************************************************************************

\documentclass[12pt]{article}
\usepackage{e-jc}
\specs{}{}{}

\usepackage{amsmath, amssymb}
%\usepackage{latexsym}
\usepackage{mathrsfs}
\usepackage{float}
\usepackage[colorlinks=true,citecolor=black,linkcolor=black,urlcolor=blue]{hyperref}
\usepackage{MnSymbol}
\usepackage{tikz} % Tikz package is needed for the graphics.

\begin{document}

\title{Vertex-transitive direct products of graphs}

\author{Richard H.~Hammack\thanks{Supported by Simons Foundation Collaboration Grant for Mathematicians 523748.}\\
\small Department of Mathematics\\[-0.8ex]
\small Virginia Commonwealth University\\[-0.8ex] 
\small Richmond, Virginia U.S.A.\\
\small\tt rhammack@vcu.edu\\
\and
Wilfried Imrich\\
\small Dept.~of Mathematics and Information Technology\\[-0.8ex]
\small Montanuniversit\"at Leoben\\[-0.8ex]
\small  Leoben, Austria\\
\small\tt wilfried.imrich@unileoben.ac.at}

\dateline{May 3, 2017}{Apr 3, 2018}{TBD}
\MSC{05C76, 05C75}
\Copyright{The authors. Released under the CC BY-ND license (International 4.0).}


\maketitle

\begin{abstract}
It is known that for graphs $A$ and $B$ with odd cycles, the
direct product $A\times B$ is vertex-transitive if and only if both
$A$ and $B$ are vertex-transitive. But this is not
necessarily true if one of $A$ or $B$ is bipartite,
and until now there has been no characterization
of such vertex-transitive direct products.
We prove that if $A$ and $B$ are both bipartite, or both
non-bipartite, then $A\times B$ is vertex-transitive if and
only if both $A$ and $B$ are vertex-transitive.
Also, if $A$ has an odd cycle and $B$ is
bipartite, then $A\times B$ is
vertex-transitive if and only if both $A\times K_2$ and $B$ are
vertex-transitive.
\end{abstract}


%\begin{keyword}
%graph direct product \sep bipartite graphs \sep vertex-transitive graph
%\end{keyword}

%\end{frontmatter}


\section{Introduction}
\label{Section:Intro}

Our graphs are finite, without multiple edges, but may
have loops. The set of isomorphism classes of graphs
that may have loops is denoted $\Gamma_0$, while
$\Gamma$ denotes those without loops. Thus
$\Gamma\subset \Gamma_0$. We denote the complete graph
on $n$ vertices as $K_n$, whereas $K_n^*$ is
$K_n$ with a loop added to each vertex.
The automorphism group of a graph $G$ is denoted as $\mathrm{Aut}(G)$.
We say~$G$ is {\bf vertex-transitive} if for any two vertices $x,y\in V(G)$,
there is a $\varphi \in \mathrm{Aut}(G)$ for which $\varphi(x)=y$.

Recall that the {\bf direct product} of two graphs $A,B$
in $\Gamma_0$ or $\Gamma$ is the graph $A\times B$ with
vertices $V(A)\times V(B)$ and edges
\[E(A\times B)=\big\{(a,b)(a',b') \mid
aa'\in E(A) \mbox{ and } bb'\in E(B)\big\}.\]
Figure~\ref{Fig:Direct} shows an example.

%%%%%%%%%%%%  BEGIN FIGURE 1 %%%%%%%%%%%
\begin{figure}[t]
\centering
\begin{tikzpicture}[scale=0.9, style=thick]
\foreach \x in {12} \draw (\x,-0.5)--+(0,1) node [left] {$K_2\;$};
\draw (16,0.5) node {$A\times K_2$};
\draw (13,-0.5)--+(0,1)--+(1,0)--+(2,1)--+(2,0)--+(1,1)--+(0,0);
\draw (13,-1.3)..controls +(-0.7,0.7) and +(-0.7,-0.7)..(13,-1.3);
\draw (15,-1.3)..controls +(0.7,0.7) and +(0.7,-0.7)..(15,-1.3);
\draw (13,-1.3)--+(2,0);
\draw (15.5,-1.3) node [right]  {$A$};
\foreach \x in {12,13,14,15} \foreach \y in {-0.5,0.5} \draw (\x,\y) [fill=white] circle (0.08);
\foreach \x in {13,14,15}  \draw (\x,-1.3) [fill=white] circle (0.08);
\end{tikzpicture}
\caption{The direct product of graphs.}
\label{Fig:Direct}
\end{figure}
%%%%%%%%%%%% END FIGURE 1 %%%%%%%%%%%%%


This product is commutative and associative in the sense that
the maps $(x,y)\mapsto (y,x)$ and $(x,(y,z))\mapsto ((x,y),z)$ are
isomorphisms
$A\times B\to B\times A$ and $A\times(B\times C)\to (A\times B)\times C$.
If $+$ represents disjoint union, then the
distributive law $A\times(B+C)=A\times B +A\times C$ holds,
which is equality of graphs, rather than
just isomorphism.
Recall also Weichsel's theorem~\cite[Theorem~5.9]{HIK}.

\begin{theorem}
\label{Theo:Weichsel}
A direct product $A\times B$ of connected graphs is
connected if and only if at least one factor has an
odd cycle; if both factors are bipartite, then
the product has exactly two components.
In general, if both $A$ and $B$ have odd cycles, then
so does $A\times B$. Moreover, if $B$ is bipartite,
with bipartition $X\cup Y$, then $A\times B$ is
bipartite, with bipartition $V(A)\times X \,\cup\, V(A)\times Y$.
\end{theorem}

It is easy to verify that if $B$ is bipartite,
then $K_2\times B=B+B$.

Note that $G\times K_1^*\cong G$ for any
$G$, so $K_1^*$ is the unit for the direct product. A graph
$G$ is called {\bf prime} over the direct product if it has
more than one vertex, and whenever $G\cong A\times B$,
one of $A$ or $B$ is isomorphic to $G$, and the other is
$K_1^*$. A result of McKenzie~\cite{FACTOR}
implies that any finite, connected non-bipartite graph factors
uniquely into prime graphs over the direct product, up to
order and isomorphism of the factors. See also Chapter 8
of~\cite{HIK}.

As a consequence of the unique prime factorization of
connected non-bipartite graphs over
the direct product, Theorem 8.19 of~\cite{HIK} states that
any [non-bipartite] direct product is vertex-transitive
if and only if each factor is vertex-transitive.
Unfortunately, the condition of non-bipartiteness was
inadvertently omitted in the statement of Theorem~8.19 of~\cite{HIK}.
Indeed, the theorem is false for bipartite graphs, as
is seen in Figure~\ref{Fig:Direct}, where
$A\times K_2$ is the (vertex-transitive) 6-cycle,
but $A$ is not vertex-transitive.

Until now, no characterization of bipartite vertex-transitive direct
products has been known. In Section~\ref{Section:Main} we give the following
complete characterization.

\medskip
\noindent
{\bf Theorem~\ref{Theo:Main}.} {\em If $A$ and $B$ are both bipartite or both non-bipartite, then
$A\times B$ is vertex-transitive
if and only if both $A$ and $B$ are vertex-transitive.
If $A$ has an odd cycle and $B$ is bipartite, then $A\times B$ is
vertex-transitive if and only if both $A\times K_2$ and $B$ are
vertex-transitive.}
\medskip

One direction of this theorem is elementary,
and follows from our next proposition.
\begin{proposition}
\label{Prop:EasyDirection}
If both $A$ and $B$ are vertex-transitive graphs,
then $A\times B$ is vertex-transitive.
If both $A\times K_2$ and $B$ are vertex-transitive,
and $B$ is bipartite, then $A\times B$ is vertex-transitive.
\end{proposition}

\begin{proof}
For the first statement, suppose both $A$ and $B$ are
vertex-transitive.
Given two vertices $(a,b)$ and $(a',b')$ of
$A\times B$, select automorphisms $\alpha$ of $A$ and
$\beta$ of $B$ for which $\alpha(a)=a'$ and $\beta(b)=b'$.
%{\sk (In case $a=a'$ (resp.\ $b=b'$), $\alpha$ (resp.\ $\beta$)  can be selected as the identity.)}
By the definition of the direct product,
$(x,y)\mapsto(\alpha(x),\beta(y))$
is an automorphism of $A\times B$ sending $(a,b)$ to $(a',b')$,
so $A\times B$ is vertex-transitive.

For the second statement, suppose both $A\times K_2$ and $B$ are vertex-transitive
and $B$ is bipartite. Then the above paragraph implies that
$(A\times K_2)\times B$ is vertex-transitive. But
\[(A\times K_2)\times B\cong A\times (K_2\times B)\cong
A\times (B+B)= A\times B+A\times B,\]
which is to say that the graph consisting of two copies of $A\times B$ is
vertex-transitive. Then certainly each copy of $A\times B$
is vertex-transitive.
\end{proof}

The converse of our main theorem is
more subtle, and some machinery is needed
to attack it. To this end,
Section~\ref{Section:Cartesian} reviews
the Cartesian product of graphs, and their unique prime
factorizations.
This
is followed by sections on $R$-thinness and Cartesian skeletons.
Section~\ref{Section:Main} proves our main theorem.
There we will need the Lov\'{a}sz
cancellation laws:

\begin{theorem}[Lov\'{a}sz~\cite{LOVASZ}]
\label{Theorem:Lovasz}
Suppose $A, B$ and $C$ are graphs, and $C$ has at least one edge.
Then $A\times C\cong B\times C$ implies $A\cong B$ provided that
\vspace{-0.1in}
\begin{itemize}
\setlength\itemsep{-5pt}
\item $C$ has an odd cycle, or
%\item there are homomorphisms $A\to C$ and $B\to C$, or
\item $A$ and  $B$ are both bipartite.
\end{itemize}
\end{theorem}

But before reviewing further preliminaries, some examples
will put our results in context. Example 1 involves a factor with loops, Example 2 a disconnected
factor without loops, and Example 3 a connected factor without loops.

\begin{example}
The graph $A$ in Figure~\ref{Fig:Direct} is not vertex-transitive.
However, the figure shows that $A\times K_2$ is vertex-transitive.
If $B$ is a bipartite vertex-transitive graph
(such as the 4-cycle), then $A\times B$ is vertex-transitive
by Proposition~\ref{Prop:EasyDirection}.
So here we have a vertex-transitive product $A\times B$, where $A$ is not vertex-transitive,
but both $A\times K_2$ and $B$ are.
\end{example}

\begin{example}
The graph $A=C_3+C_6$ is not vertex-transitive because one component is a triangle
and the other is a hexagon. But Figure~\ref{Fig:ExDisconn} shows that
$A\times K_2$ is three copies of a hexagon, which {\it is} vertex-transitive.
If $B$ is a bipartite vertex-transitive graph, then
Proposition~\ref{Prop:EasyDirection} says
$A\times B$ is vertex-transitive.
So $A\times B$ is vertex-transitive, where $A$ is not vertex-transitive,
but both $A\times K_2$ and $B$ are.

%%%%%%%%%%%% BEGIN FIGURE 2 %%%%%%%%%%%%%%%
\begin{figure}[h]
\centering
\begin{tikzpicture}[style=thick, scale=1]
\def\vr{2pt}
\draw (0.3,0)--(0.3,1) node [left] {$K_2\;$};
\draw (1,0)--(2,1)--(3,0)--(1,1)--(2,0)--(3,1)--(1,0);
\draw (4,0)--(5,1)--(6,0)--(7,1)--(8,0)--(9,1)--(4,0);
\draw [densely dashed] (4,1)--(5,0)--(6,1)--(7,0)--(8,1)--(9,0)--(4,1);
\draw (1,-0.7)--(3,-0.7)..controls +(0,-0.7) and +(0,-0.7)..(1,-0.7);
\draw (4,-0.7)--(9,-0.7)..controls +(0,-0.7) and +(0,-0.7)..(4,-0.7);
\foreach \x in {0.3,1,2,3,4,5,6,7,8,9} \foreach \y in {0,1} \draw [fill=white] (\x,\y) circle (\vr);
\foreach \x in {1,2,3,4,5,6,7,8,9} \foreach \y in {-0.7} \draw [fill=white] (\x,\y) circle (\vr);
\draw (9.5,-0.7) node {$A$};
\draw (9.25,1) node [right] {$A\times K_2$};
\end{tikzpicture}
\caption{The disconnected graph $A$ is not vertex-transitive, but its product with $K_2$ is.}
\label{Fig:ExDisconn}
\end{figure}
\end{example}
%%%%%%%%%%%%% END FIGURE 2 %%%%%%%%%%%%%%%%

\begin{example}
Figure~\ref{Fig:ExConn} shows a graph $A$ and the product $A\times K_2$.
For brevity, vertices $(x,\varepsilon)$ of $A\times K_2$ are written as $x\varepsilon$, and,
for clarity, edges are encoded dashed, dotted, solid black and solid gray. The dashed outer
hexagon~$H$ in~$A$ corresponds to a subgraph  $H\times K_2=H+H$ in the product,
 which is shown as two dashed copies of~$H$.
Similarly each solid (black or gray) triangle $T$ in~$A$ corresponds to a solid (black or gray) hexagon
$T\times K_2$ in the product.

%%%%%%%%%%%%% BEGIN FIGURE 3 %%%%%%%%%%%%%%%%
\begin{figure}[h]
\centering
\begin{tikzpicture}[style=thick, scale=1]
\def\vr{2pt}
\def\rr{2.5}
\def\rrr{1.75}
\def\rrrr{0.75}
\def\off{-8}
\def\offf{-4.75}

% Radial lines
\foreach \x in{ 30, 90,150,210,270,330} \draw [dotted] (\off,0) +(\x:\rrr)--+(\x:\rrrr);
% Triangle
\foreach \x in{ 90,210,330} \draw [line width=2.2pt, lightgray] (\off,0) +(\x:\rrrr)--+(\x+120:\rrrr);
% Other triangle
\foreach \x in{ 30,150,270} \draw [very thick] (\off,0) +(\x:\rrrr)--+(\x+120:\rrrr);
% Outer Hexagon
\foreach \x in {30, 90,150,210,270,330} \draw [densely dashed] (\off,0) +(\x:\rrr)--+(\x+60:\rrr);
\foreach \x in {30, 90,150,210,270,330} \foreach \y in {\rrr, \rrrr} \draw (\off,0) +(\x:\y) [fill=white] circle (\vr);

\draw (\off,0) node {$A$};
\draw (0,0) node {$A\times K_2$};
{\small
\draw (\off,0) +(90:\rrr+0.3) node {$a$};
\draw (\off,0) +(30:\rrr+0.3) node {$b$};
\draw (\off,0) +(-30:\rrr+0.3) node {$c$};
\draw (\off,0) +(-90:\rrr+0.3) node {$d$};
\draw (\off,0) +(-150:\rrr+0.3) node {$e$};
\draw (\off,0) +(150:\rrr+0.3) node {$f$};

\draw (\off,0) +(75:1.2*\rrrr) node {$0$};
\draw (\off,0) +(15:1.2*\rrrr) node {$1$};
\draw (\off,0) +(-45:1.2*\rrrr) node {$2$};
\draw (\off,0) +(-105:1.2*\rrrr) node {$3$};
\draw (\off,0) +(-165:1.2*\rrrr) node {$4$};
\draw (\off,0) +(132:1.2*\rrrr) node {$5$};
}

\draw (-5.75,0) node {\Large $\times$};

% Draw K_2
\draw (\offf,-0.75)--(\offf,0.75);
\foreach \y in {0.75} \draw (\offf,\y) [fill=black] circle (\vr);
\foreach \y in {-0.75} \draw (\offf,\y) [fill=white] circle (\vr);
\draw (\offf, 0.9) node [above] {\small $1$};
\draw (\offf, -0.9) node [below] {\small $0$};
\draw (-3.75,0) node {\Large $=$};

% Draw non-hexagon edges
\foreach \x in {7.5, 37.5, ...,352.5} \draw [dotted] (\x:\rr)--(\x+15:\rr);
% Draw hexagon
\draw [densely dashed] (7.5:\rr)--(7.5+105:\rr)--(7.5+120:\rr)--(7.5+225:\rr)--(7.5+240:\rr)--(7.5+345:\rr)--(7.5+360:\rr);
% Draw hexagon
\draw  [line width=2.2pt, lightgray] (37.5:\rr)--(37.5+105:\rr)--(37.5+120:\rr)--(37.5+225:\rr)--(37.5+240:\rr)--(37.5+345:\rr)--(37.5+360:\rr);
% Draw hexagon
\draw [densely dashed] (67.5:\rr)--(67.5+105:\rr)--(67.5+120:\rr)--(67.5+225:\rr)--(67.5+240:\rr)--(67.5+345:\rr)--(67.5+360:\rr);
%\draw twisted hexagon
\draw [very thick] (82.5:\rr)--(82.5+135:\rr)--(82.5+120:\rr)--(82.5+255:\rr)--(82.5+240:\rr)--(82.5+375:\rr)--(82.5:\rr);
\foreach \x in {7.5, 37.5, ...,352.5} \draw [fill=white](\x:\rr) circle (\vr);
\foreach \x in {-7.5, -37.5,...,-352.5} \draw (\x:\rr) [fill=black] circle (\vr);

{\small
\draw (7.5:\rr+0.4) node {$c0$};
\draw (7.5+15:\rr+0.4) node {$21$};
\draw (7.5+30:\rr+0.4) node {$40$};
\draw (7.5+45:\rr+0.4) node {$e1$};
\draw (7.5+60:\rr+0.4) node {$d0$};
\draw (7.5+75:\rr+0.4) node {$31$};
\draw (7.5+90:\rr+0.4) node {$10$};
\draw (7.5+105:\rr+0.4) node {$b1$};
\draw (7.5+120:\rr+0.4) node {$a0$};
\draw (7.5+135:\rr+0.4) node {$01$};
\draw (7.5+150:\rr+0.4) node {$20$};
\draw (7.5+165:\rr+0.4) node {$c1$};
\draw (7.5+180:\rr+0.4) node {$b0$};
\draw (7.5+195:\rr+0.4) node {$11$};
\draw (7.5+210:\rr+0.4) node {$50$};
\draw (7.5+225:\rr+0.4) node {$f1$};

\draw (7.5+240:\rr+0.4) node {$e0$};
\draw (7.5+255:\rr+0.4) node {$41$};
\draw (7.5+270:\rr+0.4) node {$00$};
\draw (7.5+285:\rr+0.4) node {$a1$};
\draw (7.5+300:\rr+0.4) node {$f0$};
\draw (7.5+315:\rr+0.4) node {$51$};
\draw (7.5+330:\rr+0.4) node {$30$};
\draw (7.5+345:\rr+0.4) node {$d1$};
}
\end{tikzpicture}
\caption{The graph $A$ is not vertex-transitive, but its product with $K_2$ is.}
\label{Fig:ExConn}
\end{figure}
%%%%%%%%%%%%% END FIGURE 3 %%%%%%%%%%%%%%%%

The graph $A$ is not vertex-transitive because some of its
vertices are on triangles and others are not.  But the product $A\times K_2$
{\it is} vertex-transitive, though seeing this may take a moment of reflection.
Note that the permutation $\pi=(a\, b\, c\, d\, e\, f)(0 \,1\, 2 \,3 \,4\, 5)$ that rotates $A$ by
$60^\circ$ induces
an automorphism $x\varepsilon\mapsto \pi(x)\varepsilon$ of $A\times K_2$ that sends the
``twisted'' solid black hexagon to the ``untwisted'' solid gray hexagon.

Also the following automorphism of order 2 exchanges the two dashed
hexagons in the product with the two solid hexagons.
\[\begin{array}{cccccc c cccccc}
a0 & b1 & c0 & d1 & e0 & f1& \mbox{\rule{0.2in}{0in}} & a1 & b0 & c1 & d0 & e1 & f0\\
\updownarrow & \updownarrow & \updownarrow & \updownarrow & \updownarrow & \updownarrow & 
&
\updownarrow & \updownarrow & \updownarrow & \updownarrow & \updownarrow & \updownarrow \\
01 & 40 & 21 & 00 & 41 & 20 & & 30 & 11 & 50 & 31 & 10 & 51 
\end{array}\]
 The other symmetries are more transparent, arising from
rotations and reflections of the product's drawing.

Now, if $B$ is a bipartite vertex-transitive graph,
then $A\times B$ is vertex-transitive
by Proposition~\ref{Prop:EasyDirection}.
So we have a vertex-transitive product $A\times B$, where $A$ is not vertex-transitive,
but both $A\times K_2$ and $B$ are.
\end{example}

Now we cover the preliminary material needed to prove
our main theorem, Theorem~\ref{Theo:Main}. We begin with the Cartesian product.

\section{The Cartesian Product}
\label{Section:Cartesian}
%Our proof of Conjecture~\ref{Conj:Main} uses the Cartesian product of graphs.
The {\it Cartesian product} of two graphs $A,B\in\Gamma$ is the graph $A\Box B\in\Gamma$ with
vertices $V(A)\times V(B)$ and edges
\[E(A\Box B)=\big\{(a,b)(a',b') \mid
aa'\in E(A) \mbox{ and } b=b', \mbox { or }  a=a' \mbox{ and } bb'\in E(B)\big\}.\]
(See Figure~\ref{Fig:Cartesian}.)
The Cartesian product is commutative and associative in the sense that
$A\Box B\cong B\Box A$ and $A\Box(B\Box C)\cong (A\Box B)\Box C$.
Letting $B+C$ denote the disjoint union of graphs $B$ and $C$, we
also get the distributive law
\begin{equation}
\label{EqnDist}
A\Box(B+C)=A\Box B+A\Box C,
\end{equation}
which is true equality, rather than mere isomorphism.

%%%%%%%%%%%% BEGIN FIGURE 4 %%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[style=thick, scale=1.2]
\def\vr{2pt}
\foreach \y in {0,1,2,3} \draw (1,\y)--(4,\y);
\foreach \x in {0,1,2,3,4} \draw (\x,1)--(\x,3);
\foreach \x in {0,1,2,3,4} \draw (\x,1)..controls +(-0.4,1)..+(0,2);
\foreach \y in {0,1,2,3} \draw (2,\y)..controls +(1,0.4)..+(2,0);
\foreach \x in {1,2,3,4} \draw (\x,0) [fill=white] circle (\vr);
\foreach \y in {1,2,3} \draw (0,\y) [fill=white] circle (\vr);
\foreach \x in {1,2,3,4} \foreach \y in {1,2,3} \draw (\x,\y) [fill=white] circle (\vr);
\draw (4.1,0) node [right] {$A$};
\draw (0,3.1) node [above] {$B$};
\draw (4,3) node [above right] {$A\Box B$};
\end{tikzpicture}
\caption{Cartesian product of graphs.}
\label{Fig:Cartesian}
\end{figure}
%%%%%%%%%%%%% END FIGURE 4 %%%%%%%%%%%%%%%%


Clearly $K_1\Box A\cong A$ for any graph $A$, so $K_1$ is the unit
for the Cartesian product.
A nontrivial graph $G$ is {\it prime over $\Box$} if for any factoring $G\cong A\Box B$,
one of $A$ or $B$ is $K_1$ and the other is $G$. Certainly every finite graph can be
factored into prime factors in $\Gamma$. Sabidussi and Vizing
\cite{SAB,VIZ} proved that this prime factorization is unique for connected graphs.
 % has a unique prime factoring.
%up to order and isomorphism
%of the factors.
More precisely, we have the following.

\begin{theorem}[Theorem 6.8 of \cite{HIK}]
\label{Prop:Cartesian:Iso}
Let $G, H\in\Gamma$ be isomorphic connected graphs with
$G=G_1\Box \cdots\Box G_k$ and $H=H_1\Box\cdots\Box H_\ell$, where
the factors $G_i$ and $H_i$ are prime.
Then $k=\ell$, and for any isomorphism $\varphi:G\to H$, there is a permutation
$\pi$ of $\{1,2,\ldots,\ell\}$ and isomorphisms $\varphi_i:G_{\pi(i)}\to H_i$
for which
\[\varphi(x_1,x_2,\ldots,x_\ell)=
\big(\varphi_1(x_{\pi(1)}),  \varphi_2(x_{\pi(2)}), \ldots,
\varphi_\ell(x_{\pi(\ell)})\big).\]
\end{theorem}

Notice that cancellation is a consequence of unique prime factorization:
For connected graphs, $A\Box C\cong B\Box C$ implies $A\cong B$.
(Cancellation holds also for disconnected graphs, but we shall not need
this stronger result.)

\pagebreak

%+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
\section{$R$-Thin Graphs}
\label{Section:Thin}
The notion of so-called $R$-thinness is an important issue in
factorings over the direct product. McKenzie~\cite{FACTOR} uses this idea
(in a somewhat more general form), citing an earlier
use by Chang~\cite{CHANG}. In other contexts, $R$-thin graphs have been
called {\it worthy} graphs~\cite{WILSON}.
See Chapter 8 of~\cite{HIK} for proofs of the assertions made in this section.

A graph $G$ is $R$-{\it thin} if no two vertices have the same open neighborhood,
that is, if $N_G(x)=N_G(y)$ implies $x=y$. Said differently, any vertex is uniquely
determined by its open neighborhood.

More generally, we form a relation $R$ on the vertices of an arbitrary graph.
Two vertices $x$ and $y$ of $G$ are
in {\em relation $R$}, written $xRy$, precisely if their open neighborhoods are
identical, that is, if $N_G(x)=N_G(y)$. It is easy
to check that $R$ is an equivalence relation on
$V(G)$.

%(For example,  the equivalence classes for $A\times B$
%in Figure~\ref{Fig:Layers}
%are $\{b1\}$, $\{b2\}$, $\{a1, c1\}$ and $\{a2, c2\}$; those of $A$ are $\{a,c\}$ and $\{b\}$,
%and those of $B$ are $\{1\}$ and $\{2\}$.)
%A graph is then $R$-thin if and only if each equivalence class contains exactly one vertex.

%+++++++++++++++++++++++++
%\begin{figure}[t]
%\centering
%\begin{tikzpicture}[scale=0.7,style=thick]
%\def\vr{2.8pt} % \vr = vertex radius;  Set \vr = 2/scale for uniform sizing of vertices
%\def\sr{2.5pt}
%\foreach \x in {0,8} \draw  (\x,1)--+(0,2);
%\foreach \x in {1,3} \draw   (\x,-0.1)--+(2,0);
%\foreach \x in {1,3} \draw (\x,1)--+(2,2);
%\foreach \x in {1,3} \draw  (\x,3)--+(2,0);
%\foreach \x in {1,3} \draw  (\x,3)--+(2,-2);
%\draw (0,3)..controls +(-1,1) and +(1,1)..+(0,0);
%\draw (8,3)..controls +(-1,1) and +(1,1)..+(0,0);
%\draw (9.8,-0.1)--+(2,0) (9.8,1)--+(2,2)--+(0,2)--+(2,0);
%\foreach \x in {0,1,3,5, 8, 9.8,11.8} \foreach \y in {1,3} \draw (\x,\y) [fill=white] circle (\vr);
%\foreach \x in {1,3,5, 9.8,11.8}   \draw (\x,-0.1) [fill=white] circle (\vr);
%{\footnotesize
%\draw (1,3.5) node {$a2$}  (3,3.5) node {$b2$} (5,3.5) node {$c2$};
%\draw (1,0.6) node {$a1$}  (3,0.6) node {$b1$} (5,0.6) node {$c1$};
%\draw (1,-0.5) node {$a$}  (3,-0.5) node {$b$} (5,-0.5) node {$c$};
%\draw (-0.4,3) node {$2$} (8,3)  node [left] {$\{2\}$};
%\draw (-0.4,1) node {$1$} (8,1)  node [left] {$\{1\}$};
%\draw (9.8,-0.1) node [left] {$\{b\}$} (11.8,-0.1) node [right] {$\{a,c\}$};
%\draw (9.8,1) node [left] {$\{b1\}$} (11.8,1) node [right] {$\{a1,c1\}$};
%\draw (9.8,3) node [left] {$\{b2\}$} (11.8,3) node [right] {$\{a2,c2\}$};
%}
%{\small
%\draw (0,4.2) node {$B$} (8,4.2) node {$B/R$};
%\draw (5.1,-0.1) node [right] {$A$} (13.1,-0.1) node [right] {$A/R$};
%\draw  (10.6,4.2) node [right] {$(A\times B)/R$};
%\draw  (3.8,4.2) node [right] {$A\times B$};
%}
%\end{tikzpicture}
%\caption{Graphs $A,B$ and $A\times B$ (left), and quotients $A/R, B/R$ and
%$(A\times B)/R$ (right).}
%\label{Fig:Layers}
%\end{figure}
%+++++++++++++++++++++++++

%This transposition is possible because
%$a2$ and $c2$ have the same neighborhood.
%Evidently, then, vertices with identical neighborhoods complicate the
%discussion of prime factorizations over the direct product. To overcome this difficulty,

An $R$-equivalence class of a graph is called an
{\bf R-class}.
Given two $R$-classes $X$ and $Y$ (not necessarily distinct), it
is easy to check that either every vertex in $X$ is adjacent to every vertex in $Y$,
or no vertex in $X$ is adjacent to any in $Y$. In particular, this means that
an $R$-class $X$ of $G$ induces either a complete subgraph
$K_n^*$ or a totally disconnected subgraph
$\overline{K_n^*}$.

As the relation $R$ is defined entirely in terms of adjacencies, it is clear that given
an isomorphism $\varphi:G\to H$ we have $xRy$ in $G$ if and only if
$\varphi(x)R\,\varphi(y)$ in $H$.
Thus $\varphi$ maps $R$-classes of~$G$ bijectively to $R$-classes of $H$.

Take any vertex $x$ of a vertex-transitive graph $G$, and say $x$
belongs to the $R$-class $X$. Because an automorphism
$\varphi$ of $G$ carries $R$-classes to $R$-classes, the $R$-class containing
$\varphi(x)$ has $|X|$ vertices. Thus, by vertex-transitivity, all $R$-classes of $G$ have size $|X|$.

Given a graph $G$, we define a quotient graph $G/R$ (in $\Gamma_0$)
whose vertex set is the
set of  $R$-classes of $G$,
and for which two classes are adjacent if they are joined by an edge of $G$.
(And a single class carries a loop provided that an edge of $G$ has both
endpoints in that class.)
%Figure~\ref{Fig:Layers} shows quotients $A/R$, $B/R$ and $(A\times B)/R$.
If $G$ is $R$-thin, then $G/R\cong G$.
An easy check confirms that $G/R$ is $R$-thin for any $G\in\Gamma_0$.

Any automorphism $\varphi:G\to G$ induces an automorphism $G/R\to G/R$
defined as $X\mapsto \varphi(X)$. Conversely, if all $R$-classes have the same
size, then we can lift any automorphism $\varphi:G/R\to G/R$ to an automorphism
of $G$ by simply declaring that each $R$-class $X$ maps to $\varphi(X)$ by an
arbitrary bijection. Moreover, if $x$ and $y$ are two
vertices in the same $R$-class, then
transposition of $x$ with $y$ is an
automorphism of $G$. This implies a lemma.

\begin{lemma}
\label{Lemma:Thin}
If a graph $G$ is vertex-transitive, then $G/R$ is vertex-transitive. If
$G/R$ is vertex-transitive, and all $R$-classes of $G$ have the same size, then $G$
is vertex-transitive.
\end{lemma}

Because $N_{A\times B}(a,b)=N_A(a)\times N_B(b)$, it follows that the $R$-classes
of $A\times B$ are precisely the sets $X\times Y$, where $X$ is an $R$-class of $A$,
and $Y$ is an $R$-class of $B$.  In fact, the map $(A\times B)/R\to A/R\times B/R$
given by $X\times Y\mapsto (X,Y)$ is an isomorphism,
as is proved in Section~8.2 of~\cite{HIK}. Thus
$A\times B$ is $R$-thin if and only if both $A$ and $B$ are.

Note also that $G$ is bipartite if and only if $G/R$ is
bipartite. 

We will use these ideas frequently, sometimes without comment.

%+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
\section{The Cartesian Skeleton}
\label{Section:Skeleton}
We now recall the definition of the Cartesian skeleton $S(G)$ of an arbitrary graph $G$ in
$\Gamma_0$. The Cartesian skeleton $S(G)$ is a graph on the vertex set of $G$ that has the property $S(A\times B)=S(A)\Box S(B)$ in the class of $R$-thin graphs,
thereby linking the direct and Cartesian products.

We construct $S(G)$ as a certain subgraph of the Boolean square of $G$.
The {\bf Boolean square}  of  $G$ is the graph $G^s$ with $V(G^s)=V(G)$
and $E(G^s)=\{xy \mid  N_G(x)\cap N_G(y)\neq \emptyset\}$. Thus, $xy$ is an edge of $G^s$
whenever $G$ has an $x,y$-walk of length two.
%For instance, if $p\geq 3$, then
%$K_p^s=K_p^*$ (i.e., $K_p$ with a loop added to each vertex). Also, $K_2^s=K_1^*+K_1^*$
%and $K_1^s=K_1$.
The left side of Figure~\ref{Fig:Skel} shows graphs $A,B$ and $A\times B$ (bold)
together with their Boolean squares $A^s,B^s$ and $(A\times B)^s$ (dotted).


If $G$ has an $x,y$-walk $W$ of even length, then $G^s$ has an $x,y$-walk
of length $|W|/2$ on alternate vertices of $W$. Thus $G^s$ is connected if
$G$ is connected and has an odd cycle. (An odd cycle guarantees an even
walk between any two vertices.) On the other hand, if $G$ is connected and bipartite, then
$G^s$ has exactly two components, whose vertex sets are the two partite sets of $G$.

We now show how to form $S(G)$ as a certain spanning subgraph of $G^s$.
Consider an arbitrary factorization $G\cong A\times B$, by which we
identify each vertex of $G$ with an ordered pair $(a,b)$.
We say that an edge $(a,b)(a',b')$ of $G^s$ is
{\bf Cartesian}
relative to the factorization $A\times B$ if either $a=a'$ and $b\ne b'$, or
$a\ne a'$ and $b=b'$. For example, in Figure~\ref{Fig:Skel} edges $xz$ and $zy$ of $G^s$
are Cartesian (relative to the factorization $A\times B$), but edges $xy$ and $yy$ of
$G^s$ are not Cartesian.
We make $S(G)$ from $G^s$ by removing the edges of $G^s$ that are not
Cartesian, but we do this in a way that does not reference the factoring $A\times B$ of $G$.
We next identify two intrinsic criteria for a non-loop edge of
$G^s$ that tell us if
it may fail to be Cartesian relative to some factoring of~$G$.


%%%%%%%%%%%%% BEGIN FIGURE 5 %%%%%%%%%%%%%%%%
\begin{figure}[bh]
\centering
\begin{tikzpicture}[scale=1.5, style=thick]
\def\vr{1.4pt} % \vr = vertex radius;  Set \vr = 2/scale for uniform sizing of vertices
\draw [line width=1pt] (4,1)--(2,3)--(1,2)--(2,1)--(4,3) (1,3)--(3,1)--(4,2)--(3,3)--(1,1);
\draw  [line width=1pt] (1,3)--(2,4)--(3,3)--(4,4);
\draw  [line width=1pt] (1,4)--(2,3)--(3,4)--(4,3);
\draw  [line width=1pt] (0,1)--(0,4) node [above] {$B$}  (1,0)--(4,0) node [right] {$A$};
\foreach \y in {0,1,2,3,4}
\draw [densely dotted] (1,\y) .. controls +(1,-.3) .. (3,\y) (2,\y) .. controls +(1,-.3) .. (4,\y);
\foreach \x in {0,1,2,3,4} \foreach \y in {1,2}
\draw [densely dotted] (\x,\y) .. controls +(-0.4,1) .. (\x,\y+2);
\foreach \x in {1,2} \foreach \y in {1,2} \draw [densely dotted] (\x,\y).. controls +(0.7,1.3)..(\x+2,\y+2);
\foreach \x in {1,2} \foreach \y in {3,4} \draw [densely dotted] (\x,\y).. controls +(0.7,-1.3)..(\x+2,\y-2);
{\footnotesize
\draw (1.05,1.1) node [right] {$x'$} (2.05,1.05) node [right] {$x$}
(1.25,3.1) node {$z'$}  (2.2,3.05) node {$z$} (3.25,3.05) node {$y'$}  (4.2,3.05) node {$y$};
\foreach \x in {1,2,3,4} \foreach \y in {0,3,4} \draw [densely dotted] (\x,\y)..controls +(-.3,.5) and +(.3,.5)..(\x,\y);
\foreach \x in {1,2,3,4} \foreach \y in {1,2} \draw [densely dotted] (\x,\y)..controls +(-.3,-.5) and +(.3,-.5)..(\x,\y);
}
\foreach \y in {1,2,3,4} \draw [densely dotted] (0,\y)..controls +(.5,-.4) and +(.5,.4)..(0,\y);
\foreach \x in {0,1,2,3,4} \foreach \y in {1,2,3,4} \draw (\x, \y) [line width=1pt, fill=white] circle (\vr);
\foreach \x in {1,2,3,4}  \draw (\x, 0) [line width=1pt, fill=white] circle (\vr);
\end{tikzpicture}
\hspace{0.25cm}
\begin{tikzpicture}[style=thick, scale=1.5]
\def\vr{1.4pt} % \vr = vertex radius;  Set \vr = 2/scale for uniform sizing of vertices
\draw [line width=1pt] (4,1)--(2,3)--(1,2)--(2,1)--(4,3) (1,3)--(3,1)--(4,2)--(3,3)--(1,1);
\draw  [line width=1pt] (1,3)--(2,4)--(3,3)--(4,4);
\draw  [line width=1pt] (1,4)--(2,3)--(3,4)--(4,3);
\draw  [line width=1pt] (0.3,1)--(0.3,4) node [above] {$B$}  (1,0)--(4,0) node [right] {$A$};
\foreach \y in {0,1,2,3,4}
\draw [densely dotted] (1,\y) .. controls +(1,-.3) .. (3,\y) (2,\y) .. controls +(1,-.3) .. (4,\y);
\foreach \x in {0.3,1,2,3,4} \foreach \y in {1,2}
\draw [densely dotted] (\x,\y) .. controls +(-0.4,1) .. (\x,\y+2);
\foreach \x in {0.3,1,2,3,4} \foreach \y in {1,2,3,4} \draw (\x, \y) [line width=1pt, fill=white] circle (\vr);
\foreach \x in {1,2,3,4}  \draw (\x, 0) [line width=1pt, fill=white] circle (\vr);
\end{tikzpicture}
\caption{Left: graphs $A, B, A\!\times\! B$ and their Boolean squares $A^s, B^s$ and
$(\!A\!\times\!B)^s$ (dotted).
Right: graphs $A, B, A\times B$ and their Cartesian skeletons $S(A), S(B)$ and
$S(A\times B)$ (dotted).}
\label{Fig:Skel}
\end{figure}
%%%%%%%%%%%%% END FIGURE 5 %%%%%%%%%%%%%%%%

\pagebreak
%(Thus the adjacency matrix of $G^s$
%is the Boolean second power of the adjacency matrix of $G$, that is,
%if $G$ has adjacency matrix $A$
%then the matrix of $G^s$ is obtained from $A^2$ by replacing each nonzero entry by 1.)

The criteria are as follows.
(Note that the symbol $\subset$ means {\it proper} inclusion, and the neighborhoods
are neighborhoods of $G$, not $G^s$.)
\begin{itemize}
%\item[(i)] If $xy$ is a loop (i.e. if $x=y$) then $xy$ cannot be Cartesian.
\item[(i)] In Figure \ref{Fig:Skel}, the edge $xy$ of $G^s$ is not Cartesian, and
there is a $z\in V(G)$ with $N_G(x)\cap N_G(y)\subset N_G(x)\cap N_G(z)$ and
$N_G(x)\cap N_G(y)\subset N_G(y)\cap N_G(z)$.
\item[(ii)] In Figure \ref{Fig:Skel}, the edge $x'y'$ of $G^s$ is not Cartesian, and
there is a $z'\in V(G)$ with $N_G(x')\subset N_G(z')\subset N_G(y')$.
\end{itemize}
Our aim is to remove from $G^s$ all edges that meet one of these criteria.
We package the above criteria into the following definition.
An edge $xy$ of $G^s$ is {\bf dispensable} if
$x=y$ or there exists $z\in V(G)$ for which both of the following statements hold.
\begin{itemize}
\item[{(1)}] $N_G(x)\cap N_G(y)\subset N_G(x)\cap N_G(z)\;\;$ or $
\;\;N_G(x)\subset N_G(z)\!\subset N_G(y)$,
\item[{(2)}] $N_G(y)\cap N_G(x)\subset N_G(y)\cap N_G(z)\;\;$ or $\;\;N_G(y)\subset N_G(z)\!\subset N_G(x)$.
\end{itemize}

Observe that the above statements (1) and (2) are symmetric in $x$ and~$y$.
It is easy to check that (i) or (ii) holding for a triple $x,y,z$ is equivalent to
{\it both} of (1) and (2) holding.
Now we come to this section's main definition.
The {\bf Cartesian skeleton} of a graph $G$ is the spanning subgraph $S(G)$ of $G^s$
obtained by removing all dispensable edges from $G^s$.

The right side of Figure \ref{Fig:Skel} is the same as its left side, except all dispensable
edges of $A^s, B^s$ and $(A\times B)^s$ are deleted. Thus the remaining dotted
edges are $S(A), S(B)$ and $S(A\times B)$. Note that although
$S(G)$ was defined without regard to the  factorization $G=A\times B$, we
nonetheless have $S(A\times B)=S(A) \Box S(B)$.
The following  proposition from~\cite{HAMIMR} asserts that this always holds for
$R$-thin graphs.

\begin{proposition}
\label{Prop:Prod}
If $A,B$ are $R$-thin graphs, then $S(A\times B)=S(A)\Box S(B)$,
provided that neither $A$ nor $B$ has any isolated vertices.
This is {\it equality}, not mere isomorphism; the graphs
$S(A\times B)$ and $S(A)\Box S(B)$ have identical vertex and edge sets.
\end{proposition}

As $S(G)$ is defined entirely in terms of the adjacency structure
of $G$, we have the following immediate consequence.% of Definition~\ref{DefSkeleton}.

\begin{proposition}
\label{Prop:SkelMap}
Any isomorphism $\varphi : G\to H$, as a map $V(G)\to V(H)$, is also an isomorphism
$\varphi:S(G)\to S(H)$.
\end{proposition}

We will also need a result concerning connectivity of Cartesian skeletons.
The following result (which does not require $R$-thinness) is from~\cite{HAMIMR}.
(For another proof, see Chapter 8 of~\cite{HIK}.)

\begin{proposition}
\label{Prop:Conn}
Suppose $G$ is connected.
\begin{itemize}
\item[{\em (i)}] If $G$ has an odd cycle, then $S(G)$ is connected.
\item[{\em (ii)}] If $G$ is nontrivial bipartite, then $S(G)$ has two connected
components. Their respective vertex sets
are the two partite sets of $G$.
\end{itemize}
\end{proposition}


%+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
\section{Main Result}
\label{Section:Main}

The next proposition is the technical heart of this paper, and uses the
material of the previous three sections. It is followed
by (and implies part of)
Theorem~\ref{Theo:Main}, our main characterization
of vertex-transitive direct products.

\begin{proposition}
\label{Prop:ThinVT}
Let $A$ and $B$ be $R$-thin, connected graphs,
either both non-bipartite, or both bipartite.
If $A\times B$ is vertex-transitive, then both $A$ and $B$
are vertex-transitive.
\end{proposition}

\begin{proof}
The proof has two parts.
Part 1 proves the result  assuming that
$A$ and $B$ are both non-bipartite.
Part 2 proves it if they are both bipartite.
%Though Part 2 has been previously established (for instance, Theorem 8.19 of~\cite{HIK}),
%it is reproved here (with a new approach) for completeness and self-containment. However, the
%reader who is familiar with the non-bipartite scenario may choose to skip Part 2.

\medskip
\noindent
{\bf Part 1.} Assume $A$ and $B$ are non-bipartite and
$A\times B$ is vertex-transitive. We will prove that $A$ is
vertex-transitive. (The same reasoning works for $B$.)
In what follows, $a,a'\in V(A)$ are two arbitrary vertices.
We will construct  $\theta\in \mbox{Aut}(A)$ with $\theta(a)=a'$.

Fix $b\in V(B)$ and select $\varphi\in \mbox{Aut}(A\times B)$ with $\varphi(a,b)=(a',b)$.
By Proposition~\ref{Prop:SkelMap}, $\varphi$ is also an automorphism
$\varphi: S(A\times B)\to S(A\times B)$, and by Proposition~\ref{Prop:Prod}, it
is an automorphism $\varphi: S(A)\Box S(B)\to S(A)\Box S(B)$.
By Proposition~\ref{Prop:Conn}, $S(A)$ and $S(B)$ are connected.
Form prime factorizations $S(A)=G_1\Box\cdots \Box G_k$, and
$S(B)=G_{k+1}\Box\cdots \Box G_\ell$, so
\begin{eqnarray*}
S(A)\Box S(B) &=&
\overbrace{G_1\Box\cdots \Box G_k}^{S(A)}\;\Box\; \overbrace{G_{k+1}\Box\cdots \Box G_\ell}^{S(B)}.
\end{eqnarray*}
We've now coordinatized $S(A)$ and $S(B)$ (hence also $A$ and $B$) so that
\[\begin{array}{ccc}
a=(a_{1},\ldots, a_{k}), &
a'=(a_{1}',\ldots, a_{k}'), &
b=\,(b_{k+1},\ldots, b_\ell)
\end{array}\]
for tuples of $a_{i}, a_{i}'\in V(G_i)$ ($1\leq i\leq k$), and $b_i\in V(G_i)$
($k+1\leq i\leq \ell$). Furthermore,
$\varphi$ is
\begin{eqnarray}
\label{Map:Phi2}
\varphi:(G_1\Box\cdots \Box G_k)\Box ( G_{k+1}\Box\cdots \Box G_\ell)&\longrightarrow &(G_1\Box\cdots \Box G_k)\Box ( G_{k+1}\Box\cdots \Box G_\ell).
\end{eqnarray}

%\[\begin{array}{cccc}
%a=(a_{1}, a_{2},\ldots, a_{k}), &
%a'=(a_{1}', a_{2}',\ldots, a_{k}'), & \mbox{and} &
%b=\,(b_{k+1},\ldots, b_\ell).
%\end{array}\]
%for tuples of $a_{i}, a_{i}'\in V(G_i)$ ($1\leq i\leq k$), and $b_i\in V(G_j)$
%($k+1\leq i\leq \ell$).

Theorem~\ref{Prop:Cartesian:Iso} applied to
(\ref{Map:Phi2}) says there is a permutation
$\pi$  of $\{1,2,\ldots,\ell\}$ and isomorphisms
$\varphi_{i}:G_{\pi(i)}\to G_i$ for which
\begin{equation}
\label{Map:Phi3}
\mbox{\small
$\!\!\varphi\big((x_1,\ldots, x_k),(x_{k+1},\ldots,x_\ell)\big)=
\Big(\!\big(\varphi_{1}(x_{\pi(1)}),\ldots, \varphi_{k}(x_{\pi(k)})\big),
\big(\varphi_{k+\!1}(x_{\pi(k+\!1)}),\ldots,
\varphi_{\ell}(x_{\pi(\ell)})\big)\!\Big).$}
\end{equation}
If we are lucky, then $\pi$ permutes the
indices $\{1,\ldots, k\}$ among themselves,
and $\{k+1,\ldots, \ell\}$ among {\it themselves}. We can then define
$\theta:A\to A$, as
\begin{eqnarray*}
\theta(x_1,\ldots, x_k)&=&
\big(\varphi_{1}(x_{\pi(1)}),\,  \varphi_{2}(x_{\pi(2)}), \,\ldots,
\varphi_{k}(x_{\pi(k)})\big).
\end{eqnarray*}
It is easy to check (and is proved below)
that $\theta$ is an automorphism of
$A$ sending
$a=(a_{1},\ldots, a_{k})$ to
$a'=(a_{1}',\ldots, a_{k}')$,
as desired.

But in general, $\pi$ will not respect the indices like this,
and constructing~$\theta$ involves more care.
In general, if we decompose $\pi$ into disjoint
cycles, some of them may interchange some indices in
$\{1,\ldots, k\}$ with those in
$\{k+1,\ldots, \ell\}$.
Figure~\ref{Fig:ThetaPhi} illustrates a typical such cycle
$\big(i\;\pi^{\,}(i)\;\pi^2(i)\,\ldots \,\pi^6(i)\big)$ of length 7.

%%%%%%%%%%%%% BEGIN FIGURE 6 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[scale=1.08,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
{\small
%\draw (-0.5,-0.25) node {$\varphi$};
\draw (0.5,-0.25) node {$G_{\pi^4(i)}$};
\draw (5.5,-0.25) node {$G_i$};
\draw (8.5,-0.25) node {$G_{\pi(i)}$};
\draw (9.5,-0.25) node {$G_{\pi^{2}(i)}$};
\draw (11.5,-0.25) node {$G_{\pi^{3}(i)}$};

\draw (1.5,-0.25) node {$G_{\pi^{5}(i)}$};
\draw (3.5,-0.25) node {$G_{\pi^{6}(i)}$};
\draw (1.4,0.65) node {\small $\varphi_{\pi^{4}(i)}$};
\draw (2.6,0.75) node {$\varphi_{\pi^{5}(i)}$};
\draw (4.6,0.75) node {$\varphi_{\pi^{6}(i)}$};
\draw (7.1,0.75) node {$\varphi_{i}$};
\draw (8.9,0.75) node {$\varphi_{\pi^{\,}(i)}$};
\draw (10.1,0.75) node {$\varphi_{\pi^{2}(i)}$};
\draw (6,1.15) node {$\varphi_{\pi^{3}(i)}$};
}
\draw [<-,>=stealth] (0.6,0.02)..controls +(0,0.65) and +(0,0.65)..(1.4,0.0);
\draw [<-,>=stealth] (1.6,0.02)..controls +(0,0.7) and +(0,0.7)..(3.4,0.0);
\draw [<-,>=stealth] (3.6,0.02)..controls +(0,0.7) and +(0,0.7)..(5.4,0.0);
\draw [<-,>=stealth] (5.6,0.02)..controls +(0,0.7) and +(0,0.7)..(8.4,0.0);
\draw [<-,>=stealth] (8.6,0.02)..controls +(0,0.65) and +(0,0.65)..(9.4,0.0);
\draw [<-,>=stealth] (9.6,0.02)..controls +(0,0.7) and +(0,0.7)..(11.4,0.0);
\draw [<-,>=stealth] (11.6,0.02)..controls +(0,1.7) and +(0,2.05)..(0.25,0.0);

\draw (3.5,-0.9) node {$\underbrace{\mbox{\rule{2.9in}{0in}}}_{S(A)}$};
\draw (9.5,-0.9) node {$\underbrace{\mbox{\rule{2in}{0in}}}_{S(B)}$};

\end{tikzpicture}
\caption{Effect of $\varphi$ on coordinates corresponding to a typical cycle of $\pi$
}
\label{Fig:ThetaPhi}
\end{figure}
%%%%%%%%%%%%% END FIGURE 6 %%%%%%%%%%%%%%%%

To control such mixing of coordinates, we define a {\bf stacking operation} on
$V(A\times B)$.  This  will allow us to modify $\varphi$ so
its $S(A)$ coordinate functions do not depend on  the factors of $S(B)$.
The idea, introduced in~\cite{HAMMACK3},
takes an input vertex $(x,y_0)\in V(A\times B)$, applies~$\varphi$
to get $\varphi(x,y_0)=(x_1,y_1)$, then
replaces the $x_1$ with $x$.
\[\begin{array}{llllll}
\multicolumn{6}{l}{\mbox{{\bf Stacking Operation}}}  \\
& 0. & \mbox{Begin with input vertex} &
(x,\;y_0)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B) \\
& 1. &  \mbox{Apply $\varphi$:} &
(x_1,y_1)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B)\\
& 2. &  \mbox{Replace $x_1$ with $x$:} &
(x,\;y_1)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B) \\
\end{array}\]

The stacking operation sends a vertex $(x,y_0)$ to an output~$(x,y_1)$,
to which we can again apply the stacking operation to get
$(x,y_2)$, etc. This process yields a sequence
\begin{equation}
(x,y_0), (x,y_1), (x,y_2), (x,y_3),\ldots .
\label{Sequence}
\end{equation}
For example, let's trace this sequence with $x=(r,s,\ldots, t, \ldots, u,\ldots)$, and where we
only consider those coordinates which correspond to the cycle of $\pi$ indicated in
Figure~\ref{Fig:ThetaPhi}. The first five terms are as
follows, where for typographical efficiency we use the abbreviations
$\bar{r}=\varphi_{\pi^3(i)}(r)$ as well as $\bar{\bar{r}}=\varphi_{\pi^2(i)}\circ\varphi_{\pi^3(i)}(r)$
and $\bar{\bar{\bar{r}}}=\varphi_{\pi(i)}\circ\varphi_{\pi^2(i)}\circ\varphi_{\pi^3(i)}(r)$.


%%%%%%%%%%%%% BEGIN FIGURE 7 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
{\small
\draw (-1.8,-0.25) node [right, white] {$(u_0,v_0)=$};
\draw (2.6,0.75) node {$\varphi_{\pi^{5}(i)}$};
\draw (4.6,0.75) node {$\varphi_{\pi^{6}(i)}$};
\draw (7.1,0.75) node {$\varphi_{i}$};
\draw (8.8,0.75) node {$\varphi_{\pi^{\,}(i)}$};
\draw (10.1,0.75) node {$\varphi_{\pi^{2}(i)}$};
\draw (5.9,1.1) node {$\varphi_{\pi^{3}(i)}$};
}
\draw [<-,>=stealth] (0.6,0.02)..controls +(0,0.65) and +(0,0.65)..(1.4,0.0);
\draw [<-,>=stealth] (1.6,0.02)..controls +(0,0.7) and +(0,0.7)..(3.4,0.0);
\draw [<-,>=stealth] (3.6,0.02)..controls +(0,0.7) and +(0,0.7)..(5.4,0.0);
\draw [<-,>=stealth] (5.6,0.02)..controls +(0,0.7) and +(0,0.7)..(8.4,0.0);
\draw [<-,>=stealth] (8.6,0.02)..controls +(0,0.65) and +(0,0.65)..(9.4,0.0);
\draw [<-,>=stealth] (9.6,0.02)..controls +(0,0.7) and +(0,0.7)..(11.4,0.0);
\draw [<-,>=stealth] (11.6,0.02)..controls +(0,1.75) and +(0,1.9)..(0.4,0.0);
\end{tikzpicture}

\noindent
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_0)=$};
\draw (0.5,-0.25) node {$r$};
\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$*$};
\draw (9.5,-0.25) node {$*$};
\draw (11.5,-0.25) node {$*$};
\draw (1.5,-0.25) node {$s$};
\draw (3.5,-0.25) node {$t$};
}
\end{tikzpicture}

\noindent
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_1)=$};
\draw (0.5,-0.25) node {$r$};
\draw (1.5,-0.25) node {$s$};
\draw (3.5,-0.25) node {$t$};
\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$*$};
\draw (9.5,-0.25) node {$*$};
\draw (11.5,-0.25) node {$\bar{r}$};
}
\end{tikzpicture}

\noindent
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_2)=$};
\draw (0.5,-0.25) node {$r$};
\draw (1.5,-0.25) node {$s$};
\draw (3.5,-0.25) node {$t$};
\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$*$};
\draw (9.5,-0.25) node {$\bar{\bar{r}}$};
\draw (11.5,-0.25) node {$\bar{r}$};
}
\end{tikzpicture}

\noindent
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_3)=$};
\draw (0.5,-0.25) node {$r$};
\draw (1.5,-0.25) node {$s$};
\draw (3.5,-0.25) node {$t$};
\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$\bar{\bar{\bar{r}}}$};
\draw (9.5,-0.25) node {$\bar{\bar{r}}$};
\draw (11.5,-0.25) node {$\bar{r}$};
}
\end{tikzpicture}

\noindent
\begin{tikzpicture}[scale=1,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_4)=$};
\draw (0.5,-0.25) node {$r$};
\draw (1.5,-0.25) node {$s$};
\draw (3.5,-0.25) node {$t$};
\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$\bar{\bar{\bar{r}}}$};
\draw (9.5,-0.25) node {$\bar{\bar{r}}$};
\draw (11.5,-0.25) node {$\bar{r}$};
}
\end{tikzpicture}
\caption{The effect of the stacking operation on a typical cycle of $\pi$.}
\label{Fig:Iterations}
\end{figure}
%%%%%%%%%%%%% END FIGURE 7 %%%%%%%%%%%%%%%%

By the third iteration, the coordinates of $y_0$ in this cycle have been ``flushed out''
and replaced (or ``stacked'') with images of $r$. Also, further iterations effect no further changes on
this cycle.  For $M\geq \ell-k$, the terms $(x,y_M)$ of Sequence~(\ref{Sequence}) agree on
all cycles that permute at least one index in $\{1,\ldots,k\}$. (A cycle
permuting only indices in $\{k+1,\ldots,\ell\}$ has not been flushed out.)

Consider now what happens when we apply $\varphi$ to the stacked vertex
$(x,y_M)$. This is shown in Figure~\ref{Fig:PhiFlushed}, with $(x,y_M)$ as in the bottom of Figure~\ref{Fig:Iterations}.
Notice that $\varphi(x, y_M)=\big(\theta(x), \eta(y_M)\big)$,
where $\theta:V(A)\to V(A)$ is a bijection and $\eta(y_M)$ is some vertex of
$S(B)$.

\smallskip
\noindent
{\bf Remark 1:} Note that $\eta$ need not be the identity. If $\pi$ has a non-trivial cycle that
permutes only indices in $\{k+1,\ldots,\ell\}$, then the corresponding coordinates of
$y_M$ do not get stacked, so $\varphi$ may alter these coordinates. 

%%%%%%%%%%%%% BEGIN FIGURE 8 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[scale=1.05,style=thick]
{\small
\draw (1.4,0.65) node {\small $\varphi_{\pi^{4}(i)}$};
\draw (2.6,0.75) node {$\varphi_{\pi^{5}(i)}$};
\draw (4.6,0.75) node {$\varphi_{\pi^{6}(i)}$};
\draw (3,1.6) node {$\varphi_i\circ\varphi_{\pi(i)}\circ\varphi_{\pi^2(i)}\circ\varphi_{\pi^3(i)}$};
\draw (8.5,0.9) node {$\mbox{id}$};
\draw (9.5,0.9) node {$\mbox{id}$};
\draw (11.5,0.9) node {$\mbox{id}$};
}
\draw [<-,>=stealth] (0.6,0.02)..controls +(0,0.65) and +(0,0.65)..(1.4,0.0);
\draw [<-,>=stealth] (1.6,0.02)..controls +(0,0.7) and +(0,0.7)..(3.4,0.0);
\draw [<-,>=stealth] (3.6,0.02)..controls +(0,0.7) and +(0,0.7)..(5.4,0.0);
\draw [<-,>=stealth] (8.6,0.02)..controls +(0.7,0.7) and +(-0.7,0.7)..(8.4,0.0);
\draw [<-,>=stealth] (9.6,0.02)..controls +(0.7,0.7) and +(-0.7,0.7)..(9.4,0.0);
\draw [<-,>=stealth] (11.6,0.02)..controls +(0.7,0.7) and +(-0.7,0.7)..(11.4,0.0);

\draw [<-,>=stealth] (5.6,0.02)..controls +(0,1.75) and +(0,1.9)..(0.4,0.0);

\foreach \x in {0,1,3,5,8,9,11} \draw [fill=lightgray,color=lightgray] (\x,0)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5} \draw (\x,-0.25) node {$\cdots$};
\foreach \x in {1,2,3,4,5,6,8,9,10,11} \draw (\x,0)--+(0,-0.5);
{\small
\draw (-1.8,-0.25) node [right] {$(x,y_M)=$};
\draw (0.5,-0.25) node {$r$};
%\draw (1.5,-0.25) node {$s$};
%\draw (3.5,-0.25) node {$t$};
%\draw (5.5,-0.25) node {$u$};
\draw (8.5,-0.25) node {$\bar{\bar{\bar{r}}}$};
\draw (9.5,-0.25) node {$\bar{\bar{r}}$};
\draw (11.5,-0.25) node {$\bar{r}$};
}

\draw (3.5,-0.9) node {$\underbrace{\mbox{\rule{2.9in}{0in}}}_{S(A)}$};
\draw (9.5,-0.9) node {$\underbrace{\mbox{\rule{2in}{0in}}}_{S(B)}$};
\end{tikzpicture}
\caption{The effect of $\varphi$ on a stacked vertex $(x,y_M)$.}
\label{Fig:PhiFlushed}
\end{figure}
%%%%%%%%%%%%% END FIGURE 8 %%%%%%%%%%%%%%%%

\noindent
{\bf Remark 2:} Our cycle $\big(\pi^4(i), \pi^5(i), \pi^6(i),i, \pi(i), \pi^2(i), \pi^3(i)\big)$ of figures~\ref{Fig:ThetaPhi}, \ref{Fig:Iterations} and~\ref{Fig:PhiFlushed}
has a string of entries between $1$ and $k$, followed by a string
between $k+1$ and $\ell$. In general a cycle of $\pi$ may alternate between
these two types of strings numerous times. Notice that in such a case coordinates
of $S(B)$ still get stacked with images of coordinates of $S(A)$.

\smallskip
Since $\varphi(a,b)=(a',b)$,
the stacking operation does not alter $(a,b)$.
After $M$ iterations we still have
$(a,b_M)=(a,b)$, so
$(a',b)=\varphi(a,b)=\varphi(a, b_M)=(\theta(a), \eta(b_M))$.
This implies
\begin{equation}
\theta(a)=a'.
\label{Eqn:Theta2}
\end{equation}

To complete Part 1, we just need to show $\theta\in{\rm Aut}(A)$.
Take $xy\in E(A)$. We claim
$\theta(x)\theta(y) \in E(A)$. Fix $bb'\in E(B)$, so that
$(x,b)(y,b')\in E(A\times B)$. Apply the stacking operation on the two endpoints, in
parallel.
\[\begin{array}{llllll}
%\multicolumn{6}{l}{\mbox{{\bf Stacking Operation}}}  \\
& 0. & \mbox{Begin with input edge:} &
(x,b)(y,b')& \in & E(A\times B) \\
& 1. &  \mbox{Apply $\varphi$ to endpoints:} &
(x_1,b_1)(y_1,b_1')& \in & E(A\times B)\\
& 2. &  \mbox{Replace $x_1$ with $x$, and $y_1$ with $y$:} &
(x,b_1)(y,b_1')& \in & E(A\times B)
\end{array}\]
After $M\geq \ell-k$ iterations, we have $(x,b_M)(y,b_M') \in  E(A\times B)$.
Applying $\varphi$ to both endpoints gives
$\big(\theta(x), \eta(b_M)\big)\big(\theta(y),\eta(b_M')\big) \in  E(A\times B)$.
Thus $\theta(x)\theta(y)\in E(A)$, meaning $\theta:A\to A$
is a bijective homomorphism, hence  also an automorphism because $A$ is finite.

In summary, $\theta\in {\rm Aut}(A)$, and
$\theta(a)=a'$ by Equation~(\ref{Eqn:Theta2}).
This means $A$ is vertex-transitive.
Because the direct product is commutative, it follows that the other factor
$B$ is also vertex-transitive.

\medskip
\noindent
{\bf Part 2.} Assume $A$ and $B$ are bipartite and
$A\times B$ is vertex-transitive. We will show $A$ is
vertex-transitive. (And thus so is $B$, by commutativity of $\times$.)
To begin, we analyze skeletons. Proposition~\ref{Prop:Conn}
implies $S(A)=A_0+A_1$ is the disjoint union of two graphs
whose respective vertex sets are the partite sets of $A$.
Similarly $S(B)=B_0+B_1$. Thus
\begin{eqnarray*}
S(A\times B)=S(A)\Box S(B)&=&(A_0+A_1)\Box(B_0+B_1)\\
&=&A_0\Box B_0+A_0\Box B_1+A_1\Box B_0+A_1\Box B_1.
\end{eqnarray*}
This is illustrated in Figure~\ref{Fig:BipProd}.
By Weichsel's theorem, $A\times B$ is bipartite and has two
components. One component has as partite sets the vertices
of $A_0\Box B_0$ and $A_1\Box B_1$, and the other component's
partite sets are the vertices of $A_0\Box B_1$ and $A_1\Box B_0$.


%%%%%%%%%%%%% BEGIN FIGURE 9 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[style=thick, scale=0.9]
\draw (2,1.1)--+(2,2) (2,1.5)--+(1.5,1.5) (2,2)--+(1.5,1.5);
\draw (2.5,3.6)--+(2,-2) (2,3)--+(2,-2) (2.5,3)--+(2,-2);
\draw (2.1,0.4)--(3.5,0.2) (2.1,0.5)--(3.5,0.5) (3.5,0.1)--(2.1,0.1)--(3.5,0.4);
\draw (0.4,2)--(0.2,3) (0.5,2)--(0.5,3) (0.1,3)--(0.1,2)--(0.4,3);
\foreach \x in {1,3.4} \foreach \y in {1,3} \draw [fill=white]
(\x,\y)--+(1.5,0)--+(1.5,1.2)--+(0,1.2)--+(0,0);

\foreach \x in {1,3.4} \draw [fill=white]
(\x,0)--+(1.5,0)--+(1.5,0.7)--+(0,0.7)--+(0,0);
\foreach \y in {1,3} \draw [fill=white]
(0,\y)--+(0,1.2)--+(0.6,1.2)--+(0.6,0)--+(0,0);

{\small
\draw (0.3,1.3) [fill=black] circle (2pt) node [above] {$b$};
\draw (1.3,1.3) [fill=black] circle (2pt) node [right] {{\footnotesize $(a,\!b)$}};
\draw (3.7,1.3) [fill=black] circle (2pt) node [right] {{\footnotesize $(a'\!,\!b)$}};
\draw (1.3,0.2) [fill=black] circle (2pt) node [above] {$a$};
\draw (3.7,0.2) [fill=black] circle (2pt) node [above] {$a'$};
}
\foreach \x in {0,1} \draw (2*\x+1.8,-0.3) node {\small $A_\x$};
\foreach \x in {0,1} \draw (-0.3,2*\x+1.6) node {\small $B_\x$};
\draw (5,0.3) node {$A$};
\draw (0.3,4.55) node {$B$};
\draw (5.8,2.5) node {$A\times B$};
\draw (4.15,3.7) node {$A_1\!\Box\! B_1$};
\draw (1.75,3.7) node {$A_0\!\Box\! B_1$};
\draw (4.15,1.9) node {$A_1\!\Box\! B_0$};
\draw (1.75,1.9) node {$A_0\!\Box\! B_0$};
\draw [<-,>=stealth] (2.5,3.8)..controls +(0.3,0.1) and +(-0.3,0.1)..+(0.9,0);
\draw  (2.95,3.9) node [above] {$\psi_1$};
\draw [->,>=stealth] (2.5,1.35)..controls +(0.3,-0.1) and +(-0.3,-0.1)..+(0.9,0);
\draw  (2.95,1) node {$\psi_0$};
\end{tikzpicture}
\caption{The effect of $\varphi$ on a direct product of two connected bipartite graphs $A$ and $B$.}
\label{Fig:BipProd}
\end{figure}
%%%%%%%%%%%%% END FIGURE 9 %%%%%%%%%%%%%%%%

As $A\times B$ is vertex-transitive, it has automorphisms mapping
any of its partite sets to any other, so by
Proposition~\ref{Prop:SkelMap}, the four skeleton components
$A_0\Box B_0$, $A_0\Box B_1$, $A_1\Box B_0$ and $A_1\Box B_1$
are all isomorphic to one another.
By cancellation, $A_0\cong A_1$ and $B_0\cong B_1$.

Let $a,a'\in V(A)$. We will produce an automorphism
$\theta$ of $A$ with $\theta(a)=a'$. Notice that it suffices to prove this for
$a$ and $a'$ in different partite sets of $A$. For if this is established and
$a,a'$ are in the same partite set, then we can take an $a''$ in the opposite partite set.
The composition of two automorphisms mapping $a\mapsto a''$ and
$a''\mapsto a'$  is an automorphism of $A$ mapping $a$ to $a'$.
Thus assume $a\in V(A_0)$ and $a'\in V(A_1)$.
Fix some $b\in V(B_0)$, and let
$\varphi$ be an automorphism of $A\times B$ with
$\varphi(a,b)=(a',b)$. 
(See Figure~\ref{Fig:BipProd}.)
From this we will now construct $\theta\in\mbox{Aut}(A)$ with $\theta(a)=a'$.

The map $\varphi$ restricts to isomorphisms
$\psi_1:A_1\Box B_1\to A_0\Box B_1$ and
$\psi_0:A_0\Box B_0\to A_1\Box B_0$, which, together,
send one component of $A\times B$ to the other.
(See Figure~\ref{Fig:BipProd}.)
%Let's agree to modify $\varphi$ (if necessary) so that on the
%other component it restricts to
%$\varphi_1^{-1}:A_0\Box B_1\to A_1\Box B_1$ and
%$\varphi_1^{-1}:A_1\Box B_0\to A_0\Box B_0$. That is,
%we can assume $\varphi$ is an involution that swaps the
%two components of $A\times B$.

Prime factor $A_0$ as $A_0=G_1\Box\cdots \Box G_k$, and $B_0$ as
$B_0=G_{k+1}\Box\cdots \Box G_\ell$. As $A_1\cong A_0$
and $B_1\cong B_0$,
we can label the vertices of $A_1$ and $B_1$ so that they also have
prime factorizations $A_1=G_1\Box\cdots \Box G_k$ and
$B_1=G_{k+1}\Box\cdots \Box G_\ell$.
Then
{\small
\[\begin{array}{ll}
A_0\!\Box\! B_1 =
\overbrace{G_1\Box\cdots \Box G_k}^{A_0}\;\Box\; \overbrace{G_{k+1}\Box\cdots \Box G_\ell}^{B_1},&
A_1\!\Box\! B_1 =
\overbrace{G_1\Box\cdots \Box G_k}^{A_1}\;\Box\; \overbrace{G_{k+1}\Box\cdots \Box G_\ell}^{B_1},\\
A_0\!\Box\! B_0 =
\overbrace{G_1\Box\cdots \Box G_k}^{A_0}\;\Box\; \overbrace{G_{k+1}\Box\cdots \Box G_\ell}^{B_0},&
A_1\!\Box\! B_0 =
\overbrace{G_1\Box\cdots \Box G_k}^{A_1}\;\Box\; \overbrace{G_{k+1}\Box\cdots \Box G_\ell}^{B_0},
\end{array}\]
}
and our restricted isomorphisms  $\psi_1$ and $\psi_0$  are
{\small
\begin{eqnarray}
\label{Map:PhiX2}
\psi_1:(G_1\Box\cdots \Box G_k)\;\Box\; (G_{k+1}\Box\cdots \Box G_\ell)&\longrightarrow &(G_1\Box\cdots \Box G_k)\;\Box\; (G_{k+1}\Box\cdots \Box G_\ell),\\
\label{Map:PhiY2}
\psi_0:(G_1\Box\cdots \Box G_k)\;\Box\; (G_{k+1}\Box\cdots \Box G_\ell )&\longrightarrow
& (G_1\Box\cdots \Box G_k)\;\Box\; (G_{k+1}\Box\cdots \Box G_\ell).
\end{eqnarray}
}
We have now coordinatized $A_0$, $A_1$, $B_0$ and $B_1$ (hence also $A\times B$) so that
\[\begin{array}{ccc}
a=(a_{1},\ldots, a_{k}), &
a'=(a_{1}',\ldots, a_{k}'), &
b=\,(b_{k+1},\ldots, b_\ell)
\end{array}\]
for tuples of $a_{i}, a_{i}'\in V(G_i)$ ($1\leq i\leq k$), and $b_i\in V(G_i)$
($k+1\leq i\leq \ell$).

Theorem~\ref{Prop:Cartesian:Iso} applied to
(\ref{Map:PhiX2}) and~(\ref{Map:PhiY2}) yields permutations
$\pi$ and $\sigma$ of $\{1,\ldots,\ell\}$ and isomorphisms
$\varphi_{i}:G_{\pi(i)}\to G_i$
and
$\varphi_{i}':G_{\sigma(i)}\to G_i$
so that
\begin{eqnarray}
\label{Map:PhiX3}
\mbox{\small $\psi_1\big((x_1,\ldots, x_k),(x_{k+1},\ldots,x_\ell)\big)=
\big((\varphi_{1}(x_{\pi(1)}),\ldots, \varphi_{k}(x_{\pi(k)})),
(\varphi_{k\!+\!1}(x_{\pi(k\!+\!1)}), \,\ldots,\,
\varphi_{\ell}(x_{\pi(\ell)}))\big),$}\\
\label{Map:PhiY3}
\mbox{\small $\psi_0\big((x_1,\ldots, x_k),(x_{k+1},\ldots,x_\ell)\big)=
\big((\varphi_{1}'(x_{\sigma(1)}),\ldots, \varphi_{k}'(x_{\sigma(k)})),
(\varphi_{k\!+\!1}'(x_{\sigma(k\!+\!1)}), \,\ldots,\,
\varphi_{\ell}'(x_{\sigma(\ell)}))\big).$}
\end{eqnarray}

Consider the effect of $\psi_1$ on a typical cycle of $\pi$, say a cycle
$(i,\pi(i), \pi^2(i), \ldots, \pi^6(i))$ of length 7, similar to that of
 Figure~\ref{Fig:ThetaPhi} (except that this time the map is
 $A_1\Box B_1\to A_0\Box B_1$ rather than $S(A)\Box S(B)\to S(A)\Box S(B)$).
 This is represented schematically in
Figure~\ref{Fig:ThetaPhi2}.

%%%%%%%%%%%%% BEGIN FIGURE 10 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[scale=1.08,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \foreach \y in {0,2}\draw [fill=lightgray,color=lightgray] (\x,\y)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,2)--(7,2)--(7,1.5)--(0,1.5)--(0,2);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,2)--(12,2)--(12,1.5)--(7.1,1.5)--(7.1,2);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11}  \foreach \y in {0,2} \draw (\x,\y)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5}  \foreach \y in {0,2} \draw (\x,\y-0.25) node {$\cdots$};
{\small
%\draw (-0.5,-0.25) node {$\varphi$};
\foreach \y in {0,2} \draw (0.5,\y-0.25) node {$G_{\pi^4(i)}$};
\foreach \y in {0,2} \draw (5.5,\y-0.25) node {$G_i$};
\foreach \y in {0,2} \draw (8.5,\y-0.25) node {$G_{\pi(i)}$};
\foreach \y in {0,2} \draw (9.5,\y-0.25) node {$G_{\pi^{2}(i)}$};
\foreach \y in {0,2} \draw (11.5,\y-0.25) node {$G_{\pi^{3}(i)}$};
\foreach \y in {0,2} \draw (1.5,\y-0.25) node {$G_{\pi^{5}(i)}$};
\foreach \y in {0,2} \draw (3.5,\y-0.25) node {$G_{\pi^{6}(i)}$};

\draw (0,0.35) node {\small $\varphi_{\pi^{4}(i)}$};
\draw (2.1,0.35) node {$\varphi_{\pi^{5}(i)}$};
\draw (4.1,0.35) node {$\varphi_{\pi^{6}(i)}$};
\draw (5.85,0.35) node {$\varphi_{i}$};
\draw (8.05,0.35) node {$\varphi_{\pi^{\,}(i)}$};
\draw (10.05,0.3) node {$\varphi_{\pi^{2}(i)}$};
\draw (11.6,0.5) node {$\varphi_{\pi^{3}(i)}$};
}
\draw [<-,>=stealth] (0.5,0)..controls +(0,1) and +(-0.5,-0.5)..(1.5,1.5);
\draw [<-,>=stealth] (1.5,0)..controls +(0,1.1) and +(-0.5,-0.5)..(3.5,1.5);
\draw [<-,>=stealth] (3.5,0)..controls +(0,1.2) and +(-0.5,-0.5)..(5.5,1.5);
\draw [<-,>=stealth] (5.5,0)..controls +(0,1.3) and +(-0.5,-0.5)..(8.5,1.5);
\draw [<-,>=stealth] (8.5,0)..controls +(0,1.4) and +(-0.5,-0.5)..(9.5,1.5);
\draw [<-,>=stealth] (9.5,0)..controls +(0,1.5) and +(-0.5,-0.5)..(11.5,1.5);
\draw [<-,>=stealth] (11.5,0)..controls +(0,1.5) and +(0,-1.5)..(0.5,1.5);

\draw (3.5,2.4) node {$\overbrace{\mbox{\rule{2.9in}{0in}}}^{\mbox{\small $A_1$}}$};
\draw (9.5,2.4) node {$\overbrace{\mbox{\rule{2in}{0in}}}^{\mbox{\small $B_1$}}$};
\draw (3.5,-0.9) node {$\underbrace{\mbox{\rule{2.9in}{0in}}}_{\mbox{\small $A_0$}}$};
\draw (9.5,-0.9) node {$\underbrace{\mbox{\rule{2in}{0in}}}_{\mbox{\small $B_1$}}$};

\end{tikzpicture}
\caption{The effect of $\psi_1$ on coordinates corresponding to a typical cycle of $\pi$.
}
\label{Fig:ThetaPhi2}
\end{figure}
%%%%%%%%%%%%% END FIGURE 10 %%%%%%%%%%%%%%%%

Notice that when we apply the stacking operation to a vertex
$(x,y_0)\in V(A_1\Box B_1)$, we arrive at a vertex $(x,y_1)$
that is still in $V(A_1\Box B_1)$. Furthermore, iterations of the stacking
operation on $(x,y_0)$ are exactly as indicated in Figure~\ref{Fig:Iterations},
and we get a sequence
\[\begin{array}{ll}
(x,y_0), \;\; (x,y_1), \;\; (x,y_2), \;\; \ldots, \;\;(x,y_M) & \mbox{in $A_1\Box B_1$}.
\end{array}\]
As in the first part of the proof, if $M\geq \ell-k$, then, in general, any $j$th coordinate of
$y_M$ for which  $\pi^s(j)\leq k$ (for some $s$) has been flushed out and replaced with the
image of a vertex of some $G_i$ with $1\leq i \leq k$.
It follows that 
$\psi_1(x,y_M)=\big(\theta_1(x), \eta_1(y_M)\big)$, where
$\theta_1:V(A_1)\to V(A_0)$ is a bijection.  (This is illustrated in Figure~\ref{Fig:ThetaPhi3}.)

%%%%%%%%%%%%% BEGIN FIGURE 11 %%%%%%%%%%%%%%%%
\begin{figure}[H]
\centering
\begin{tikzpicture}[scale=1.08,style=thick]
\foreach \x in {0,1,3,5,8,9,11} \foreach \y in {0,2}\draw [fill=lightgray,color=lightgray] (\x,\y)--+(0,-0.5)--+(1,-0.5)--+(1,0);
\draw (0,2)--(7,2)--(7,1.5)--(0,1.5)--(0,2);
\draw (0,0)--(7,0)--(7,-0.5)--(0,-0.5)--(0,0);
\draw (7.1,2)--(12,2)--(12,1.5)--(7.1,1.5)--(7.1,2);
\draw (7.1,0)--(12,0)--(12,-0.5)--(7.1,-0.5)--(7.1,0);
\foreach \x in {1,2,3,4,5,6,8,9,10,11}  \foreach \y in {0,2} \draw (\x,\y)--+(0,-0.5);
\foreach \x in {2.5,4.5, 6.5,7.5,10.5}  \foreach \y in {0,2} \draw (\x,\y-0.25) node {$\cdots$};
{\small
\draw (-1.9,1.8) node [right] {$(x,y_M)=$};
\draw (-2.2,-0.25) node [right] {$\psi_1(x,y_M)=$};
%\draw (-0.5,-0.25) node {$\varphi$};
\foreach \y in {2} \draw (0.5,\y-0.25) node {$r$};
\foreach \y in {2} \draw (5.5,\y-0.25) node {$u$};
\foreach \y in {0,2} \draw (8.5,\y-0.25) node {$\bar{\bar{\bar{r}}}$};
\foreach \y in {0,2} \draw (9.5,\y-0.25) node {$\bar{\bar{r}}$};
\foreach \y in {0,2} \draw (11.5,\y-0.25) node {$\bar{r}$};
\foreach \y in {2} \draw (1.5,\y-0.25) node {$s$};
\foreach \y in {2} \draw (3.5,\y-0.25) node {$t$};

\draw (4.6,0.75) node [right] {\small $\varphi_i\!\circ\!\varphi_{\pi(i)}\!\circ\!\varphi_{\pi^2(i)}\!\circ\!\varphi_{\pi^3(i)}$};
\draw (0,0.35) node {\small $\varphi_{\pi^{4}(i)}$};
\draw (2.1,0.35) node {$\varphi_{\pi^{5}(i)}$};
\draw (4.1,0.35) node {$\varphi_{\pi^{6}(i)}$};
%\draw (5.85,0.35) node {$\varphi_{i}$};
\draw (8.5,0.75) node [right] {$\mbox{id}$};
\draw (9.5,0.75) node  [right] {$\mbox{id}$};
\draw (11.5,0.75) node  [right] {$\mbox{id}$};
}
\draw [<-,>=stealth] (0.5,0)..controls +(0,1) and +(-0.5,-0.5)..(1.5,1.5);
\draw [<-,>=stealth] (1.5,0)..controls +(0,1.1) and +(-0.5,-0.5)..(3.5,1.5);
\draw [<-,>=stealth] (3.5,0)..controls +(0,1.2) and +(-0.5,-0.5)..(5.5,1.5);
\draw [<-,>=stealth] (8.5,0)--(8.5,1.5);
\draw [<-,>=stealth] (9.5,0)--(9.5,1.5);
\draw [<-,>=stealth] (11.5,0)--(11.5,1.5);
\draw [<-,>=stealth] (5.5,0)..controls +(0,1.3) and +(0,-1.5)..(0.5,1.5);

\draw (3.5,2.4) node {$\overbrace{\mbox{\rule{2.9in}{0in}}}^{\mbox{\small $A_1$}}$};
\draw (9.5,2.4) node {$\overbrace{\mbox{\rule{2in}{0in}}}^{\mbox{\small $B_1$}}$};
\draw (3.5,-0.9) node {$\underbrace{\mbox{\rule{2.9in}{0in}}}_{\mbox{\small $A_0$}}$};
\draw (9.5,-0.9) node {$\underbrace{\mbox{\rule{2in}{0in}}}_{\mbox{\small $B_1$}}$};

\end{tikzpicture}
\caption{The effect of $\psi_1$ on a stacked vertex.
}
\label{Fig:ThetaPhi3}
\end{figure}
%%%%%%%%%%%%% END FIGURE 11 %%%%%%%%%%%%%%%%

Likewise the stacking operation applied to a vertex
$(w,z_0)\in V(A_0\Box B_0)$ yields a vertex $(w,z_1)\in V(A_0\Box B_0)$.
Applying the stacking operation iteratively  to $(w,z_0)$, gives a sequence
\[\begin{array}{ll}
(w,z_0), \; (w,z_1), \; (w,z_2), \;\,\ldots, \; \;(w,z_M) & \mbox{in $A_0\Box B_0$}.
\end{array}\]
As before,
$\psi_0(w,z_M)=\big(\theta_0(w), \sigma_0(w,z_M)\big)$, for some
bijection $\theta_0:V(A_0)\to V(A_1)$. 

Let $\theta:A\to A$ be the map that restricts to $\theta_0$ on $V(A_0)$ and
$\theta_1$ on $V(A_1)$. Thus $\theta$ reverses the bipartition of $A$.
Because $\varphi(a,b)=(a',b)$, the stacking operation does not alter $(a,b)$. That is,
applying it $M$ times to $(a,b)$ yields $(a,b_M)$ with $b_M=b$. Therefore
\[(a',b)=\varphi(a,b)=\varphi(a,b_M)=\psi_0(a,b_M)=(\theta_0(a), \sigma_0(a,b_M)),\]
which means $\theta_0(a)=a'$, that is, $\theta(a)=a'$.

%But the fact that $\varphi^2=\mbox{id}$ means
%$\pi_1$ and $\pi_0$ have order 2, that is, they are products of disjoint
%transpositions.
%For example, Figure~\ref{Fig:BipartiteStacking} shows the effect of a  typical
%$\varphi$.


%To reach our goal, we define a {\bf stacking operation} on vertices of
%$A\times B$.  This idea, introduced in~\cite{HAMMACK3},
%takes an input vertex $(u,v)\in V(A\times B)$, applies~$\varphi$, then
%throws away the first coordinate and replaces it with $u$.
%\[\begin{array}{llllll}
%\multicolumn{6}{l}{\mbox{{\bf Stacking Operation}}}  \\
%& 0. & \mbox{Begin with input vertex} &
%(u,\;v)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B) \\
%& 1. &  \mbox{Apply $\varphi$:} &
%(u_1,v_1)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B)\\
%& 2. &  \mbox{Replace $u_1$ with $u$:} &
%(u,\;v_1)& \in & V\big(S(A)\Box S(B)\big)=V(A\times B) \\
%\end{array}\]

%Applying the stacking operation to  $(u,v)$ yields an output vertex
%$(u,v_1)$.  The important thing to notice is that the first coordinate of
%$\varphi(u,v_1)$ does not depend of $v_1$.
%For example, let's trace this  with $(u,v)\in V(S(A_1)\Box S(B_1))$, and where we
%only consider those coordinates shown in Figure~\ref{Fig:BipartiteStacking}.
%(Note $\varphi(u,v)=\varphi_1(u,v)$ in this case, so
%refer to the top part of the figure.)



%\bigskip
%Note that $\varphi(u,v_1)=(\theta_1(u), \sigma_1(u,v_1))$, where $\theta_1:V(A_1)\to V(A_0)$
%is a bijection, and $\sigma_1$ is a map $V(A_1\Box B_1)\to V(B_1)$.
%In exactly the same way, if $(u,v)\in V(A_0\Box B_0)$, then applying stacking operation to %this
%vertex gives some $(u,v_1)\in V(A_0\Box B_0)$, and then applying $\varphi$ gives
%$\varphi(u,v_1)=(\theta_0(u),\sigma_0(u,v_1))$ for a bijection $\theta_0:V(A_0)\to V(A_1)$.

%Define a bijection $\theta:V(A)\to V(A)$ to be $\theta_0$ on $V(A_0)$ and $\theta_1$ on $V(A_1)$.



To complete the proof we need to show $\theta\in \mbox{Aut}(A)$.
For this, take an edge $wx\in E(A)$, with $w\in V(A_0)$ and
$x\in V(A_1)$. We claim  $\theta(w)\theta(x)\in E(A)$.
Fix an edge
$z_0y_0$ of $B$ with $z_0\in V(B_0)$ and $y_0\in V(B_1)$,
so $(w,z_0)(x,y_0)$ is an edge of $A\times B$ with
$(w,z_0)\in V(A_0\Box B_0)$ and $(x,y_0)\in V(A_1\Box B_1)$.
Apply the stacking operation on the two endpoints, in
parallel.
\[\begin{array}{llllll}
%\multicolumn{6}{l}{\mbox{{\bf Stacking Operation}}}  \\
& 0. & \mbox{Begin with input edge:} &
(w,z_0)(x,y_0)& \in & E(A\times B) \\
& 1. &  \mbox{Apply $\varphi$ ($\psi_0$ on left, $\psi_1$ on right):} &
(w_1,z_1)(x_1,y_1)& \in & E(A\times B)\\
& 2. &  \mbox{Replace $w_1$ with $w$, and $x_1$ with $x$:} &
(w,z_1)(x,y_1)& \in & E(A\times B) \\
\end{array}\]
Iterating this $M$ times produces an edge $(w,z_M)(x,y_M)\in E(A\times B)$.
Applying $\varphi$ to both endpoints, we get
\[\big(\theta_0(w), \sigma_0(w,z_M)\big)\big(\theta_1(x),\sigma_1(x,y_M)\big) \in  E(A\times B).\]
From this, $\theta_0(w)\theta_1(x)\in E(A)$, meaning $\theta(w)\theta(x)\in E(A)$. Thus
$\theta:A\to A$ is a bijective homomorphism, hence  also an automorphism because $A$ is finite.
As $\theta(a)=a'$, the graph $A$ is vertex-transitive. The proof is complete.
\end{proof}

We now reach our main theorem.

\pagebreak
\begin{theorem}
\label{Theo:Main}
If $A$ and $B$ are both non-bipartite or both
bipartite, then $A\times B$ is vertex-transitive
if and only if both $A$ and $B$ are vertex-transitive.
If $A$ has an odd cycle and $B$ is bipartite, then $A\times B$ is
vertex-transitive if and only if both $A\times K_2$ and $B$ are
vertex-transitive.
\end{theorem}

\begin{proof}
If both $A$ and $B$ are vertex-transitive,
then Proposition~\ref{Prop:EasyDirection} implies $A\times B$ is vertex-transitive.
By the same proposition, if both $A\times K_2$ and $B$ are vertex-transitive and $B$ is bipartite,
then $A\times B$ is vertex-transitive (as $A\times K_2$ is bipartite).
Thus it remains to prove the converses of the two statements.

First, suppose $A$ and $B$ are non-bipartite, and $A\times B$ is
vertex-transitive. Write $A=A_1+\cdots +A_m$ and
$B=B_1+\cdots +B_n$ as disjoint unions of their components, with
$A_1$ and $B_1$ non-bipartite. Then $A\times B=\sum_{i,j} A_i\times B_j$.
By Weichsel's theorem, $A_i\times B_1$ and
$A_j\times B_1$ are connected for any $i,j$,
so each is a component of $A\times B$.
But as $A\times B$ is vertex-transitive, all
its components are isomorphic, so
$A_i\times B_1\cong A_j\times B_1$ and
$A_i\cong A_j$ by Theorem~\ref{Theorem:Lovasz}.
Thus any two components of $A$ are
isomorphic (and non-bipartite). By the same argument, any two
components of $B$ are isomorphic (and non-bipartite).

Because $A_1\times B_1$ is a component of the
vertex-transitive graph $A\times B$, it is itself vertex-transitive.
By Lemma~\ref{Lemma:Thin},
$(A_1\times B_1)/R$ is vertex-transitive, and it is $R$-thin by the remarks in Section~\ref{Section:Thin}.
Because $(A_1\times B_1)/R\cong A_1/R\times B_1/R$, and both
$A_1/R$ and $B_1/R$ are non-bipartite, Proposition~\ref{Prop:ThinVT}
says $A_1/R$ and $B_1/R$ are vertex-transitive.

Now, because all $R$-classes of $A\times B$
have the same size, and each $R$-class of
$A\times B$ is a product of $R$-classes of
$A$ and $B$, respectively, we conclude that
all $R$-classes of $A$ (hence of $A_1$) have the same size, and
all $R$-classes of $B$ (hence of $B_1$) have the same size.
Lemma~\ref{Lemma:Thin} now implies that
$A_1$  and $B_1$ are vertex-transitive. But the components of $A$ are
all isomorphic to $A_1$, so $A$ is vertex-transitive. Likewise, so is
$B$.

Next suppose $A$ and $B$ are both bipartite, and 
$A\times B$ is vertex-transitive.
Again, $A\times B=\sum_{i,j} A_i\times B_j$, and by Weichsel's theorem,
each summand $A_i\times B_j$ has exactly two components.
Because $A\times B$ is vertex-transitive, its components are all
isomorphic, and vertex-transitive themselves, and it
follows that each $A_i\times B_j$ is vertex-transitive.

Thus $A_i\times B_1\cong A_j\times B_1$ for any
$i,j$, so  $A_i\cong A_j$ by Theorem~\ref{Theorem:Lovasz}, that is,
all components of $A$ are isomorphic. Similarly, all
components of $B$ are isomorphic.

By Lemma~\ref{Lemma:Thin},
$(A_1\times B_1)/R$ is vertex-transitive, and it is also $R$-thin.
As $(A_1\times B_1)/R\cong A_1/R\times B_1/R$, 
and $A_1/R$ and $B_1/R$ are both bipartite,
Proposition~\ref{Prop:ThinVT}
implies that both $A_1/R$ and $B_1/R$ are vertex-transitive.

From here, the proof proceeds exactly as in
the non-bipartite case, and we conclude that
both $A_1$ and $B_1$ are vertex-transitive, thus also $A$ and $B$.

For the second statement's converse, let $A$ be non-bipartite, $B$ bipartite, and $A\times B$ vertex-transitive. By
Proposition~\ref{Prop:EasyDirection},
$(A\times B)\times K_2\cong (A\times K_2)\times B$ is vertex-transitive. As $A\times K_2$
and $B$ are bipartite, the first statement of the theorem (proved
above) implies that  $A\times K_2$ and $B$ are vertex-transitive.
\end{proof}

\begin{thebibliography}{99}
\setlength{\itemsep}{0pt}
\bibitem{CHANG} C.C.~Chang, Cardinal factorization of finite relational structures,
{\it Fund. Math.} {\bf 60} (1967)  251--269.

\bibitem{HAMMACK3} C.~Crenshaw and R.~Hammack, Edge-transitive bipartite
direct products, preprint.

%\bibitem{HAMMACK} R.~Hammack,
%W.~Imrich and S.~Klav\v{z}ar, Edge-transitive products, \emph{J.~Algebraic Combin.}, {\bf 43}(4) 837--850, 2016.

%\bibitem{HAMMACK2} R.~Hammack, On unique prime bipartite factors of graphs,
%{\it Discrete Math.}, {\bf 313}(9) (2013) 1018--1027.

%\bibitem{HAMMACK0} G.~Abay-Asmerom, R.~Hammack, C.~Larson and D.~Taylor,
%Direct product factorization of bipartite graphs with bipartition-reversing involutions,
%{\it SIAM Journal on Discrete Mathematics}, {\bf 23}(4)  (2010) 2042--2052.

\bibitem{HAMIMR} R.~Hammack and W.~Imrich, On Cartesian skeletons of graphs,
\emph{Ars Math. Contemp.},
  {\bf{2}} (2009) 191--205.


\bibitem{HIK} {\it Handbook of Product Graphs,} second edition, by R.~Hammack,
W.~Imrich and S.~Klav\v{z}ar, 
Series: Discrete Mathematics and Its Applications, CRC Press, Boca Raton, FL, 2011.

%\bibitem{HOLT} D.~Holt, A graph which is edge transitive but not arc transitive, {\it J.~Graph Theory}, (1981) 5 (2): 201--204.

%\bibitem{IMRICHEDGE} W.~Imrich, A.~Iranmanesh,
%S.~Klav\v{z}ar and A.~Soltani, Edge-transitive lexicographic and Cartesian products, {\it Discuss.~Math.~Graph Theory} (2016).

%\bibitem{IMRICHDIRECT} W.~Imrich, Factoring cardinal product graphs in polynomial time,
% {\it Discrete Math.,} {\bf 192}: (1998)119--144.

\bibitem{LOVASZ} L.~Lov\'{a}sz,  \textit{On the cancellation law among finite relational structures}, Period. Math. Hungar. {\bf 1}(2) (1971) 145--156.

\bibitem{FACTOR} R.~McKenzie, Cardinal multiplication of structures with a reflexive relation,
{\it Fund. Math.} {\bf 70} (1971)  59--101.

\bibitem{SAB} G.~Sabidussi, Graph multiplication, {\em Math. Z.}, {\bf 72} (1960) 446--457.

\bibitem{VIZ} G.V.~Vizing, The Cartesian product of graphs% (Russian),
{\em Vy\v{c}isl Sistemy}, {\bf 9} (1963) 30--43.

\bibitem{WILSON} S.~Wilson, A worthy family of semisymmetric graphs,
{\it Discrete Math.}, {\bf 271} (2003) 283--294.

\end{thebibliography}

\end{document}



