\input{template}
\input{macros}

\begin{document}
\lecture{3} {Linear algebra I}{Luv Kumar}
\section{Vector Space}
A \emph{vector space} is defined as a set of vectors $\mbox{\bf V}$ together with the real numbers $\rea$ (called \emph{scalars}) and the following two operations:
\begin{itemize}
\item \textbf{Vector Addition:} $\mbox{\bf V} \times \mbox{\bf V} \rightarrow \mbox{\bf V}$, represented as $\textbf{u} + \textbf{v}$, where $\textbf{u}, \textbf{v} \in \mbox{\bf V}$.
\item \textbf{Scalar Multiplication:} $\rea \times \mbox{\bf V} \rightarrow \mbox{\bf V}$, represented as $a \cdot \textbf{u}$, where $a \in \rea$ and $\textbf{u} \in \mbox{\bf V}$.
\end{itemize}
Following are the properties of a vector space.
\begin{itemize}
\item \textbf{Abelian Group laws:}
\begin{enumerate}
\item \textbf{Associativity:} $\textbf{u} + (\textbf{v} + \textbf{w}) = (\textbf{u} + \textbf{v}) + \textbf{w}$
\item \textbf{Identity:} there exists a zero vector $\overline{\textbf{0}}$ which is the group identity element, i.e.\ $\overline{\textbf{0}} + \textbf{u} = \textbf{u}$
\item \textbf{Inverse:} for all $\textbf{u} \in \mbox{\bf V}$, there exists the additive inverse $-\textbf{u}$ s.t. $\textbf{u} + (-\textbf{u}) = \overline{\textbf{0}}$
\item \textbf{Commutativity:} $\textbf{u} + \textbf{v} = \textbf{v} + \textbf{u}$
\end{enumerate}

\item \textbf{Scalar multiplication laws:} 
\begin{enumerate}
\item \textbf{Multiplication by} $0$\textbf{:} $0 \cdot \textbf{u} = \overline{\textbf{0}}$
\item \textbf{Multiplication by} $-1$\textbf{:} $(-1) \cdot \textbf{u} = -\textbf{u}$
\item \textbf{Identity multiplication:} $1 \cdot \textbf{u} = \textbf{u}$
\item \textbf{Distributivity of vector sum:} $a \cdot (\textbf{u} + \textbf{v}) = a \cdot \textbf{u} + a \cdot \textbf{v}$, where $a \in \rea$ and $\textbf{u},\textbf{v} \in \mbox{\bf V}$
\item \textbf{Distributivity of scalar sum:} $(a+b) \cdot \textbf{u} = a \cdot \textbf{u} + b \cdot \textbf{u}$
\item \textbf{Associativity of scalar multiplication:} $a \cdot (b \cdot \textbf{u}) = (ab) \cdot \textbf{u}$
\end{enumerate}
\end{itemize}

\section{Subspace}
$\mbox{\bf U} \subseteq \mbox{\bf V}$ is a \emph{subspace} of $\mbox{\bf V}$ if $\mbox{\bf U}$ is itself a vector space, i.e. for all $\textbf{u}_1, \textbf{u}_2 \in \mbox{\bf U}$ and $\alpha \in \rea$, we have $\textbf{u}_1 + \textbf{u}_2 \in \mbox{\bf U}$ and $\alpha \cdot \textbf{u}_1 \in \mbox{\bf U}$. For example, if $\textbf{u} \in \mbox{\bf V}$, then $\mbox{\bf U} = \{\alpha \cdot \textbf{u} : \alpha \in \rea\}$ is a subspace.

For $\alpha = -1$, $-\textbf{u}_1 \in \mbox{\bf U}$ whenever $\textbf{u}_1 \in \mbox{\bf U}$. Hence, $-\textbf{u}_1 + \textbf{u}_1 = \overline{\textbf{0}} \in \mbox{\bf U}$. Therefore, the zero vector $\overline{\textbf{0}}$ is always a member of any subspace.
\begin{Exa} \label{lec3:Exa1}
In a 2-dimensional space, any line passing through the origin is a subspace. If there is any vector in $\mbox{\bf U}$ that does not lie on this line, then $\mbox{\bf U}$ has to be the entire plane.
\end{Exa}

\section{Linear Dependence, Independence and basis}
\begin{Def} \label{lec3:Def1}
Vectors $\textbf{v}_1, \dots , \textbf{v}_n$ are \emph{linearly dependent} if there exist $\alpha_1, \dots, \alpha_n \in \rea$, not all zero, such that \[ \sum_{i=1}^{n} \alpha_i \cdot \textbf{v}_i = \overline{\textbf{0}}.\]
\end{Def}

\begin{Def} \label{lec3:Def2}
Vectors $\textbf{v}_1, \dots , \textbf{v}_n$ are \emph{linearly independent} if they are not linearly dependent, i.e. for $\alpha_1, \dots, \alpha_n \in \rea$, \[ \sum_{i=1}^{n}\alpha_i \cdot \textbf{v}_i = \overline{\textbf{0}} ~~ \implies ~~ \alpha_i = 0,~\forall i. \]
\end{Def}

The basis of a vector space is defined in terms of linear dependence as follows:
\begin{Def} \label{lec3:Def3}
Vectors $\textbf{v}_1, \dots , \textbf{v}_n$ form the basis of a vector space $\mbox{\bf V}$ iff:
\begin{enumerate}
\item They are linearly independent.
\item Every other vector $\textbf{w}$ which belongs to $\mbox{\bf V}$ can be written as \[ \textbf{w} = \sum_{i=1}^{n} \beta_i \cdot \textbf{v}_i. \]
\end{enumerate}
Alternatively, $\textbf{v}_1, \dots , \textbf{v}_n$ form a basis of the vector space $\mbox{\bf V}$ if, on adding any other vector $\textbf{w} \in \mbox{\bf V}$ to this set, the set becomes linearly dependent.
\end{Def}
There can be multiple bases for the same vector space, but all of them will have the same size. Therefore, if $\textbf{v}_1, \dots , \textbf{v}_n$ is a basis, and $\textbf{u}_1, \dots , \textbf{u}_m$ is also a basis, then $m = n$. The number of vectors in the basis is called the \emph{dimension} of the vector space.
\end{document}
