\documentstyle[fullpage,11pt]{article}
%% MACROS
%\newtheorem{theorem}{Theorem}
\newcommand{\DS}[1]{{\LARGE{\it [DS: {#1}]}}} %notes to Dan
%% NOTATION
\newcommand{\paren}[1]{\left({#1}\right)}
\newcommand{\Oh}[1]{O\!\paren{#1}}
\newcommand{\poly}[1]{{\em poly}\!\paren{#1}}
\newcommand{\TM}{\mbox{TM}}
\newcommand{\TIME}{\mbox{TIME}}
\newcommand{\A}{A}
\newcommand{\accept}{{\em accept}}
\newcommand{\reject}{{\em reject}}
\newcommand{\ppoly}{\mbox{$P/poly$} }
\newcommand{\cs}{\mbox{$\cal{C/S}$} }
%% FORMATTING
\newcommand{\nolineskips}{%
\setlength{\parskip}{0pt}%
\setlength{\parsep}{0pt}%
\setlength{\topsep}{0pt}%
\setlength{\partopsep}{0pt}%
\setlength{\itemsep}{0pt}}
\input{preamble.tex}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\lecture{5}{February 24, 1998}{Dan Spielman}{David Ratajczak}
\section{Introduction}
In this lecture we will prove a relationship between the polynomial
time hierarchy and another important class we will study: $\ppoly$.
First, we will define the class $\ppoly$. We then introduce the notion
of circuit complexity and show its relation to $\ppoly$. Finally, we
prove the main theorem stating that if NP can be decided by a sequence
of polynomial size circuits then the polynomial hierarchy collapses to
a very low level.
\section{\ppoly and Circuit Complexity}
We begin by defining the $\cs$ notation.\footnote{This definition was
not covered during this lecture, but touched upon briefly in the
previous lecture.}
\begin{definition} Let $\cal{C}$ be a class of languages and $\cal{S}$ be a
family of sequences of strings. Then define $\cs = \{L |
\,\exists A \in \cal{C}$ and $s \in \cal{S}$ s.t.\ $x \in L
\Leftrightarrow (x,s_{|x|}) \in A \}$.
\end{definition}
In other words, \cs is the complexity class computable by Turing
machines of complexity class $\cal{C}$, but which can use some
appropriately sized {\em advice string} dependent only on the length
of the input to aid in computation. Notice that this is more
restrictive than being able to use a different advice string for every
input. In this lecture, we will restrict our attention to one example
of the \cs notation: the class $\ppoly$.
\begin{definition} $\ppoly = \{L |\, \exists A \in P$, a sequence of strings
$\{s_i\}_{i\in {\bf N}}$, and a constant $k$ s.t.\ $|s_i|=\Oh{i^k}$
and $x \in L \Leftrightarrow (x,s_{|x|}) \in A\}$.
\end{definition}
This definition follows directly from that of the \cs notation, where
$\cal{C} = P$ and our ``advice string'' must be of length polynomial
in the length of the input string. Notice that \ppoly allows
some non-uniformity in its languages. Every language in \ppoly has
some ``magic string'' for each input length that aids in deciding
membership for all strings of that length. We would like to know if any
interesting classes we already know about have this property. For
instance, is $\ppoly=NP$? We will soon answer this question, but
first we introduce the notion of circuit complexity and its relation
to $\ppoly$.
\begin{definition}
Given a boolean function $$f:\{0,1\}^a\longrightarrow\{0,1\}^b,$$ the
{\bf circuit complexity} of $f$, written as $C_f$, is the least number
$m$ such that there exists a circuit with $m$ wires that computes $f$.
\end{definition}
We point out here that our circuit is fixed in size and must be
finite. We now define the circuit complexity class analogous to $P$.
\begin{definition}
A language $L$ has {\bf polynomial circuit complexity} if there exists
a constant $k$ such that for all $n$, the function $f_n$ that is $1$
if and only if its input (of length $n$) is in $L$, has circuit
complexity $\Oh{n^k}$.
\end{definition}
We now reveal the somewhat non-intuitive relationship between
polynomial circuit complexity and $\ppoly$.
\begin{proposition}
The complexity class \ppoly is equivalent to the class of languages
with polynomial circuit complexity. In other words $L\in \ppoly$ if
and only if $L$ has polynomial circuit complexity.
\end{proposition}
\begin{proof}
$\Leftarrow$: If $L$ has polynomial circuit complexity, then for each
$n$, there is a circuit of size polynomial in $n$ that decides
membership in $L$ for all words of length $n$. We can encode this
circuit in a string, $s_n$, also of size polynomial in $n$. Now we
can construct a polynomial time TM that takes a string $x$ and
$s_{|x|}$ and simulates the circuit described by $s_{|x|}$ running on
input $x$.
$\Rightarrow$: Assume $L \in \ppoly$. If the machine $M$ that decides
$L$ runs in $\TIME(\Oh{n^k})$, then we can construct a circuit $C_n$
of size at most $\Oh{n^{2k}}$ gates that simulates $M$ running on
input strings of length $n$ (as in Cook's construction). Hardwired into
each circuit will be the advice string $s_n$, which is constant for
each input size $n$ and which grows polynomially in $n$.
\end{proof}
\section{$NP$ and $\ppoly$}
We will now state the main theorem of this lecture.
\begin{theorem}[Karp-Lipton + Sipser]\label{theorem1}
If $NP \subseteq \ppoly$ then $PH \subseteq \Sigma_2^P$. Thus the
polynomial hierarchy would ``collapse.''
\end{theorem}
To begin a proof of this theorem, we can first note that $NP \subseteq
\ppoly$ iff $SAT \in \ppoly$. Therefore $NP \subseteq \ppoly$ is
equivalent to: \{$\exists A \in P$, a constant $k$, and a sequence
$\{g_i\}_{i\in{\bf N}}$ s.t.\ $|g_i| = \Oh{(ki)^k}$ and $\phi \in
SAT \Leftrightarrow (\phi, g_{|\phi|}) \in A$\}. So we are now assuming that
$NP\subseteq \ppoly$ and that we can obtain $A\in P$ with the above
property. For the remaining part of this section we will let $M$ be
the polynomial-time Turing machine that decides $A$.
\begin{definition}
We define a sequence of strings $\alpha$ to be {\em good} if it is of
the form $\alpha=(\alpha_1, \ldots, \alpha_l)$ s.t.\ $\forall \phi:|\phi|
\leq l, \phi \in SAT \Leftrightarrow (\phi,\alpha_{|\phi|})\in A$ and
$|\alpha_i| \leq (ki)^k$. $GOOD$ is the language of all string
sequences that are {\em good}.
\end{definition}
Obviously $(g_1,g_2,\dots,g_l)$ as defined above would be a {\em
good} sequence, though it is not necessarily unique.
For the proof of Theorem \ref{theorem1} we will need to show how to
decide $GOOD$ efficiently. As a warm-up, we will first show that
$GOOD\in\Pi_2^P$.
\begin{lemma} $GOOD \in \Pi_2^P$.\end{lemma}
\begin{proof}Assume we are given some sequence $\alpha$. Then we
can construct an ATM that performs the following steps:\\ On input
$(\alpha_1,\dots,\alpha_l)$
\begin{enumerate}
\item{Use $\forall$ to guess all strings $\phi$ s.t.\ $|\phi| \leq l$.}
\item{Run $M$ on the input $(\phi,\alpha_{|\phi|})$.}
\item{If $M$ accepts then existentially ($\exists$) guess a witness to verify that $\phi\in SAT$. If there is a witness we accept; otherwise we reject.}
\item{If $M$ rejects then universally ($\forall$) guess all witnesses to verify that $\phi\notin SAT$. If any witness shows that $\phi\in SAT$ then reject. Otherwise accept.}
\end{enumerate}
The language accepted by this ATM is clearly in $\Pi_2^P$. Since this
ATM directly verifies the condition defined above for a sequence to be
{\em good}, we have $GOOD\in \Pi_2^P$.
\end{proof}
Before we can prove Theorem \ref{theorem1}, we will need to prove a
slightly stronger result than the one above. We will require one
additional concept, {\em self-reducibility}, to accomplish this.
\section{Self-reducibility}
\begin{definition}
We define a language $L$ to be {\em self-reducible} if there exists a
polynomial time oracle TM $M^{?}$ that, when given $L$ itself as its
oracle, accepts a string $w$ iff $w \in L$, and that on inputs of
length $n$ only asks its oracle questions of length less than $n$. In
other words a self-reducible language
has a well-formed recursive definition.
\end{definition}
\begin{proposition}
$SAT$ is self-reducible.
\end{proposition}
\begin{proof} To prove this, we note that any instance of $SAT$ is
equivalent to deciding the truth of the statement
$$\exists x_1 \exists x_2 \dots \exists x_n: \phi(x_1, x_2,\dots,
x_n)$$ for a given boolean expression $\phi$. We now fix $x_1$ to be
$1$ and $0$ respectively, resulting in a union of two smaller
expressions $$\{\exists x_2\ldots\exists x_n:\phi(1, x_2, \dots,
x_n)\}\cup\{\exists x_2\dots \exists x_n:\phi(0, x_2, \dots, x_n)\}.$$
This expression is equivalent to the first but is composed of two
expressions of smaller size which can be fed to the recursive oracle.
After receiving the oracle's answer for the two subexpressions, it is
easy to accept if one of the queries returns an accept. We reject
otherwise.
\end{proof}
Using the self-reducibility of $SAT$, we can now prove a stronger
result for $GOOD$ than we proved above.
\begin{proposition} $GOOD \in coNP=\Pi_1^P$.\end{proposition}
\begin{proof} The proof involves using self-reducibility to replace the use of
$\exists$ in the weaker proof above. Specifically, given a sequence
$\alpha$, we decide whether $\alpha\in GOOD$ as follows:
\begin{enumerate}
\item{Use $\forall$ to pick all strings $\phi$ s.t.\ $|\phi| \leq l$.}
\item{Run $M$ on input $(\phi,\alpha_{|\phi|})$. If $|\phi|=1$ then we accept or reject based on whether or not $M$'s answer agrees with $\phi$ (which must be $0$ or $1$). Otherwise we continue.}\label{bs1}
\item{If $M$ rejects, then we proceed as we did previously and universally ($\forall$) check all witnesses to be sure that none of them imply that $\phi\in SAT$.}
\item{If $M$ accepts, then we use the self-reducibility of $SAT$ to generate $2$ smaller subexpressions $\phi_1$ and $\phi_2$. We use $\forall$ to recursively call line \ref{bs1} with $(\phi_1,\alpha_{|\phi_1|})$ and $(\phi_2,\alpha_{|\phi_2|})$ as inputs. And then accept if either of them accept.}
\end{enumerate}
Since we only use $\forall$ branching we can agglutinate all universal
quantifiers and see that the machine described above starts in a
$\forall$ state and never switches to a $\exists$ state. Therefore
$GOOD\in\Pi_1^P=coNP$.
\end{proof}
\section{Proving the theorem}
We can now prove Theorem \ref{theorem1}. First, we restate the theorem as
follows:
\begin{theorem} If $NP \subseteq \ppoly$ then $\Sigma_3^P \subseteq
\Sigma_2^P$. This implies that the polynomial hierarchy would collapse to $\Sigma_2^P$.
\end{theorem}
\begin{proof} Consider an instance of $QBF_3$, which we will remember is
complete in $\Sigma_3^P$. The instance will have the form $\exists x
\forall y \exists z: \phi(x,y,z)$.
If we assume $NP \subseteq \ppoly$ then we can solve the instance of $QBF_3$
in $\Sigma_2^P$ by the following steps:
\begin{enumerate}
\item{Use $\exists$ to guess each possible advice sequence $\alpha$.}
\item{Use $\exists$ to guess each $x$.}
\item{Use $\forall$ to guess every $y$.}
\item{Test if $\alpha\in GOOD$.}
\item{If so, use $\alpha$ to decide $(\exists z: \phi(x,y,z),
\alpha_{|\exists z: \phi(x,y,z)|}) \in A$.}
\end{enumerate}
As we showed above, $GOOD \in coNP$. We can therefore decide whether
$\alpha$ is good using only $\forall$. Furthermore, $(\exists z:
\phi(x,y,z), \alpha_{|\exists z: \phi(x,y,z)|}) \in A$ can be decided in
$P$. Therefore, an ATM requires only one alternation starting from
$\exists$ to decide an instance of $QBF_3$, showing $\Sigma_3^P
\subseteq \Sigma_2^P$. This completes the proof of Theorem
\ref{theorem1}.
\end{proof}
\end{document}