\documentclass[10pt]{article}
\usepackage{amsfonts}
\newtheorem{define}{Definition}
%\newcommand{\Z}{{\bf Z}}
\usepackage{psfig}
\oddsidemargin=0.15in
\evensidemargin=0.15in
\topmargin=-.5in
\textheight=9in
\textwidth=6.25in
\begin{document}
\input{preamble.tex}
\lecture{19}{November 15, 2004}{Madhu Sudan}{Kyomin Jung} \noindent
\\
\section{Overview}
In this lecture we will introduce and examine some topics in
pseudo-randomness, and we will see some applications of coding theory
to them. In particular, we will define an $l$-wise independent random
number generator function $G$ and construct one. We will then
define and examine $\delta$-almost $l$-wise independent $G$, and
$\epsilon$-biased $G$. Finally, we will give a construction of an
$\epsilon$-biased space $G$ using some results from coding theory.
\section{Use of randomness}
Usually a randomized algorithm $A$ takes $(x,y)$ as input, where $x$
is the ``real'' input and $y$ is a random string independent of $x$.
We hope that for some desired function $f(x)$, $Pr[A(x,y)=f(x)]$
exceeds some threshold, where the probability is taken over the
distribution of $y\in\{0,1\}^n$. Usually we assume that each bit of
$y$ is uniformly and independently distributed.
Then how can we obtain such random string $y$? We may
obtain $y$ by physical sources of randomness, for example, "Zener
Diode". But in many situations generating randomness by physical source may be very expensive. So computer
scientists try to design algorithm that use a few random inputs and
generates 'Pseudo-random' string that is pretty longer in size than its input.
\section{Pseudo-randomness}
Suppose that we are given a randomized algorithm $A$ that
satisfies
\begin{equation}
Pr_{y\in\{0,1\}^n}[A(x,y)=f(x)]\ge\frac{2}{3}
\end{equation}
One may hope to find a $G:\{0,1\}^t \rightarrow \{0,1\}^n$
satisfying
\begin{equation}
Pr_{s\in\{0,1\}^t}[A(x,G(s))=f(x)]\ge \frac{2}{3}-\epsilon.
\end{equation}
for small $\epsilon$. Here, we assume that $s\in\{0,1\}^t$ has the uniform distribution.\\
\begin{itemize}
\item Question: For sufficiently small $\epsilon>0$, does there exist a $G$ satisfying (2) for every $A$?\\
\item The answer is No.
\end{itemize}
(Fix $G:\{0,1\}^{n-1} \rightarrow \{0,1\}^n$. Then $\exists S\subseteq
\{0,1\}^n$ such that $|S|=2^{n-2}$ and
\begin{equation}
Pr_{s\in\{0,1\}^{n-1}}[G(s)\in S]\ge \frac{1}{2}. \end{equation} Let
$x=\emptyset$ and let $A(x,y)=1$ if $y\in S$, and $A(x,y)=0$
otherwise.
Then $Pr_{y\in\{0,1\}^n}[A(y)=0]=\frac{3}{4}$ but
$Pr_{s\in\{0,1\}^{n-1}}[A(G(s))=0]\le\frac{1}{2}$.)\\
So we may try to pick a broad class of algorithms $W$ and have $G$
work for every $A\in W$. If we could do that for $W=\{$all polynomial
time algorithms$\}$ or $W=\{$all polynomial sized circuits$\}$, it
would be nice, but we do not know whether such $G$'s exist for these
classes. For the following $W$'s, it is known that such $G$'s exist.
\begin{itemize}
\item $W=\{$algorithms that depend on limited independence$\}$
\item $W=\{$algorithms that perform ``linear tests''$\}$
\end{itemize}
In this lecture, we will deal with the first case.
\section{$l$-wise independence}
\begin{definition}
We say $G:\{0,1\}^t \rightarrow \{0,1\}^n$ is \textrm{$l$-wise
independent} if $\forall T \subseteq [n]$, $|T|=l$, $\forall
b_1,b_2,\ldots,b_l\in\{0,1\}$,
\begin{equation}
Pr_{s\in\{0,1\}^t}[G(s)|_T
=(b_1,b_2,\ldots,b_l)]=2^{-l}.
\end{equation}
\end{definition}
When $W=\{$algorithms that depend on at most $l$-wise
independence$\}$, an $l$-wise independent $G$ works for every $A\in W$.
To construct a $G$ that is $l$-wise independent, let $C$ be an
$[n,t,?]_2$ linear code s.t. $C^\perp$ is an $[n,n-t,l+1]_2$ linear
code.\\
\begin{claim}
$x\mapsto C(x)$ is a $l$-wise independent generator. \end{claim}
(For the proof of this claim, see Problem Set 1, Problem 4.)\\
Let $C^\perp$ be a BCH code with distance $(l+1)$. Then $C^\perp$
is an $[n,n-\lfloor \frac{l}{2}\rfloor \log n, l+1]$ code. So $C$ is
an $[n,\lfloor \frac{l}{2}\rfloor \log n,?]$ code, and we obtain an
$l$-wise independent $G$ s.t.
\begin{equation}
G:\{0,1\}^{\lfloor \frac{l}{2}\rfloor \log n}\rightarrow \{0,1\}^n.
\end{equation}
For a fixed $l$, $t=\lfloor \frac{l}{2}\rfloor \log n$ is logarithmic
in $n$, so the sample space $\{0,1\}^t$ has size polynomial in $n$
for every constant $l$.
\section{$\delta$-almost $l$-wise independence \& $\epsilon$-biased space}
Sometimes $l$-wise independence is ``stronger'' than what we need.
Let $\delta$ be a positive real number.
\begin{definition}
$G: \{0,1\}^t \rightarrow \{0,1\}^n$ is \textrm{$\delta$-almost
$l$-wise independent} if the following holds $\forall T\subseteq
[n], |T|=l$ and $\forall A:\{0,1\}^l\rightarrow \{0,1\}$,
\begin{equation}
|Pr_{s\in \{0,1\}^t}[A(G(s)|_T)=1]-Pr_{y\in\{0,1\}^l}[A(y)=1]|\le
\delta
\end{equation}
\end{definition}
\begin{definition}
$G$ is \textrm{$\epsilon$-biased} if for every non-trivial linear
function $A:\{0,1\}^n\rightarrow\{0,1\}$, it is the case that
\begin{equation}
|Pr_{y\in\{0,1\}^n}[A(y)=1]-Pr_{s\in\{0,1\}^t}[A(G(s))=1]|\le
\epsilon.\end{equation} \end{definition} Note that for every
nontrivial linear $A$, $Pr_{y\in\{0,1\}^n}[A(y)=1]=\frac{1}{2}$, and
there exist $T_A\subseteq [n]$ s.t. $A(y)=\bigoplus_{i\in T_A}y_i$.
So, (7) becomes
\begin{equation}
\frac{1}{2}-\epsilon \le Pr_{s\in\{0,1\}^t}[A(G(s))=1]\le
\frac{1}{2}+\epsilon
\end{equation}
\begin{proposition} Every $\epsilon$-biased generator also yields a $2^l
\epsilon$-almost $l$-wise independent generator for all $l$.
\end{proposition}
We will not prove this proposition here. Now suppose that we want a
$\frac{1}{n^2}$-almost $\log n$-wise independent family. For
$\epsilon =\frac{1}{n^3}$, if we are given an $\epsilon$-biased $G$,
then by setting $l=\log n$, $G$ is a $\frac{1}{n^2}$-almost $\log
n$-wise independent generator, as desired. So now we need to construct
an $\epsilon =\frac{1}{n^3}$-biased space $G$.
\section{Construction of an $\epsilon$-biased space $G$}
Let $N=2^t$ and suppose that we are given
$[N,n,(\frac{1}{2}-\epsilon)N]_2$ linear code $C$ with condition
that its maximum weight(number of 1's) codeword has weight at most
$(\frac{1}{2}+\epsilon)N$. Suppose further that $N=
\frac{n}{\epsilon^3}$. Let $n\times N$ matrix $F$ be the generator
matrix of $C$. Let $j:\{0,1\}^t \rightarrow [N]$ be a 1-1
correspondence. For $s\in \{0,1\}^t$ and $1\le i\le n$, define
\begin{equation}G(s)_i=F_{i,j(s)}.\end{equation} Then by the property of $C$, for any
nonempty $T\subseteq [n]$, \begin{equation}\frac{1}{2}-\epsilon\le
Pr_{s\in\{0,1\}^t}[\bigoplus_{i\in
T}G(s)_i=1]\le\frac{1}{2}+\epsilon.\end{equation} So, $G$ is an
$\epsilon$-biased space.\\
For $\epsilon=\frac{1}{n^3}$, $N=\frac{n}{\epsilon^3}=n^{10}$. So if
$t=\log N=10\log n$, then we can obtain a $\frac{1}{n^2}$-almost
$\log n$-wise independent family.\\
In contrast to a pseudo-random generator, a random number
extractor extracts ``pure'' random strings from ``contaminated''
random sources. Here, contaminated means that the source is far from
the uniform distribution. An extractor takes $(x,y)$ as input, where
$x$ is a contaminated random string and $y$ is a pure but short
random string. Using $x$ and $y$, the extractor tries to make its
output $z$ close to the uniform distribution. Generally $z$ is a
somewhat shorter string than $x$. In the next lecture, we will talk
about random number extractors.
\end{document}