\documentclass[12pt]{article} \usepackage{latexsym} \usepackage{amssymb} \title{Algebra 1; MA20008; Sheet 4 Solutions} \author{{\tt G.C.Smith@bath.ac.uk}} \date{2-xi-2004} \begin{document} \maketitle \begin{enumerate} \item {\em Suppose that $n$ is a natural number or $0$, and $F$ is a field. Show that there is a vector space over $F$ of dimension $n$.}\newline \noindent {\bf Solution\ } The zero space has basis $\emptyset$ and so has dimension $|\emptyset| = 0$. If $n > 0$, then $F^n$ has dimension $n$. \item {\em Suppose that $V$ is a vector space with subspaces $U$ and $W$ both of dimension $n < \infty$. Does it follow that $V$ is finite dimensional? Does it follow that $V$ has dimension $n$? Does it follow that $U = W$? In each case you should supply a reason for your answer.} \newline \noindent {\bf Solution\ } The answers are no, no and no. To illustrate all three points, let $F$ be a field and let $V = F[X]$ be the set of polynomials in $X$ with coefficients in $F$. This $V$ is not a vector space of finite dimension. Let $U$ be the subset of $V$ consisting of polynomials of degree at most 1. Let $W$ be the subset of $V$ consisting of polynomials of degree at most 2 but which have constant term 0. Now $U \not = W$, $\dim U = \dim W = 2$ but $\dim V \not = 2.$ \item {\em Let $V$ be a vector space of dimension $n$. Suppose that $V_0, V_1, \ldots, V_m$ are subspaces of $V$ with \[ V_0 \leq V_1 \leq \cdots \leq V_m.\]} \begin{enumerate} \item {\em Suppose that $m > n$. Show that there is $i \in \{ 1,2, \ldots, m\}$ such that $V_i = V_{i-1}.$} \newline \noindent {\bf Solution\ } The dimensions of the spaces $V_0, V_1, \ldots, V_m$ form a weakly increasing sequence of integers in $\{0, 1, \ldots, n\}$. If $m > n$, this sequence cannot be strictly increasing (otherwise $\dim V_m \geq m > n$), so there is $i$ with $\dim V_{i-1} = \dim V_i$; since $V_{i-1} \leq V_i$, it follows (theorem in lectures) that $V_{i-1} = V_i.$ \item {\em Suppose that $m \leq n$. 
Show that it may be that the spaces \[V_0, V_1, \ldots, V_m\] are distinct.} \newline \noindent {\bf Solution\ } Let ${\bf v_1}, {\bf v_2}, \ldots , {\bf v_n}$ be a basis of $V$. Let $V_j$ be the span of ${\bf v_1}, {\bf v_2}, \ldots , {\bf v_j}.$ This does the job. \end{enumerate} \item Suppose that $\alpha : U \longrightarrow W$ is a linear map between vector spaces over the same field. Let ${\bf x_1}, {\bf x_2}, \ldots, {\bf x_n}$ be vectors in $U$. \begin{enumerate} \item {\em Suppose that $U = \langle {\bf x_1}, {\bf x_2}, \ldots, {\bf x_n} \rangle$ and $\alpha$ is surjective. Prove that $W = \langle \alpha({\bf x_1}), \alpha({\bf x_2}), \ldots, \alpha({\bf x_n}) \rangle.$} \newline \noindent {\bf Solution\ } Suppose that ${\bf w} \in W$. Since $\alpha$ is surjective there is ${\bf u} \in U$ such that $\alpha({\bf u}) = {\bf w}.$ Now ${\bf u} = \sum_i \lambda_i {\bf x_i}$ so ${\bf w} = \alpha\left(\sum_i \lambda_i {\bf x_i}\right) = \sum_i \lambda_i \alpha({\bf x_i}).$ \item {\em Suppose that ${\bf x_1}, {\bf x_2}, \ldots, {\bf x_n}$ are linearly independent and $\alpha$ is injective. Show that $\alpha({\bf x_1}), \alpha({\bf x_2}), \ldots, \alpha({\bf x_n})$ are linearly independent.} \newline \noindent {\bf Solution\ } Suppose that $\sum_i \lambda_i \alpha({\bf x_i}) = {\bf 0}.$ Therefore $\alpha ( \sum_i \lambda_i {\bf x_i}) = {\bf 0}.$ Now by injectivity $\sum_i \lambda_i {\bf x_i} = {\bf 0}.$ However, the ${\bf x_i}$ are linearly independent, so each $\lambda_i$ is $0$. \end{enumerate} \item {\em Let $\zeta = e^{\frac{2\pi i}{5}} \in \mathbb C.$} \begin{enumerate} \item {\em Suppose that we view $\mathbb C$ as a vector space over $\mathbb Q$. Show that $1, \zeta, \zeta^2, \zeta^3$ are linearly independent.} \newline \noindent {\bf Solution\ } $\zeta$ is a root of $X^4 + X^3 + X^2 + X + 1$. However, $\zeta$ is not a root of any rational polynomial of smaller degree. 
To see this, note that a smallest degree non-zero rational polynomial having $\zeta$ as a root must divide $X^4 + X^3 + X^2 + X + 1$. First eliminate the possibility of a linear factor, and then a quadratic factor. \item {\em Suppose that we view $\mathbb C$ as a vector space over $\mathbb R$. Show that $1, \zeta, \zeta^2, \zeta^3$ are linearly dependent.} \newline \noindent {\bf Solution\ } $\overline \zeta = \zeta^4 = \zeta^{-1}.$ Now $(X - \zeta)(X - \overline \zeta)$ is a real polynomial of degree 2 which has $\zeta$ as a root. \end{enumerate} \item Let $V$ be a vector space with subspaces $U, W$ such that $U$ and $W$ are both finite dimensional. Let ${\bf u_1}, {\bf u_2}, \ldots, {\bf u_m}$ be a basis of $U$ and ${\bf w_1}, {\bf w_2}, \ldots, {\bf w_n}$ be a basis of $W$. \begin{enumerate} \item {\em Show that $U + W$ is finite dimensional.} \newline \noindent {\bf Solution\ } Certainly $U + W = \langle {\bf u_1}, {\bf u_2}, \ldots, {\bf u_m}, {\bf w_1}, {\bf w_2}, \ldots, {\bf w_n} \rangle$, so $U+W$ has a finite spanning set and therefore a finite basis. \item {\em Show that ${\bf u_1}, {\bf u_2}, \ldots, {\bf u_m}, {\bf w_1}, {\bf w_2}, \ldots, {\bf w_n}$ need not be a basis of $U + W$.} \newline \noindent {\bf Solution\ } Well, perhaps ${\bf u_1} = {\bf w_1}.$ That would do. \item {\em Suppose that $U + W = U \oplus W$. Show that \[{\bf u_1}, {\bf u_2}, \ldots, {\bf u_m}, {\bf w_1}, {\bf w_2}, \ldots, {\bf w_n}\] is a basis of $U+W$.} \newline \noindent {\bf Solution\ } Spanning is not an issue; linear independence is. Suppose that there are scalars $\lambda_i, \mu_j$ such that $\sum_i \lambda_i {\bf u_i} + \sum_j \mu_j {\bf w_j} = {\bf 0}.$ Since $U + W = U \oplus W$ it follows that $\sum_i \lambda_i {\bf u_i} = {\bf 0}$ and $\sum_j \mu_j {\bf w_j} = {\bf 0}$. Now $\lambda_i$ and $\mu_j$ are $0$ for every $i$ and $j$ by the linear independence of the relevant sequences. 
\item {\em Suppose that ${\bf u_1}, {\bf u_2}, \ldots, {\bf u_m}, {\bf w_1}, {\bf w_2}, \ldots, {\bf w_n}$ is a basis of $U+W$. Show that $U+W = U \oplus W$. } \newline \noindent {\bf Solution\ } It suffices to show that $U \cap W = 0.$ Suppose not, then there is ${\bf v} \in U \cap W$ with ${\bf v} \not = {\bf 0}$. Now ${\bf v} \in U$ so ${\bf v} = \sum \theta_i {\bf u_i}$, and ${\bf v} \in W$ so ${\bf v} = \sum \psi_j {\bf w_j}.$ This yields a non-trivial linear relation $\sum \theta_i {\bf u_i} - \sum \psi_j {\bf w_j} = {\bf 0}$ among the vectors ${\bf u_i}$ and ${\bf w_j}$, which cannot therefore be linearly independent. \end{enumerate} \item {\em Let $I$ be a set. Let $V$ be the set of real valued functions on $I$; more formally \[ V = \left\{ f \vert f: I \longrightarrow \mathbb R\right\}.\] Define addition on $V$ by $(f + g)(x) := f(x) + g(x)$ for all $x \in I$. If $\lambda \in \mathbb R$ and $f \in V$ we define $\lambda \cdot f \in V$ by $(\lambda \cdot f)(x) = (\lambda)(f(x))$ where the final multiplication is just the product (in $\mathbb R$).} \begin{enumerate} \item {\em Check that $V$ is now a vector space over $\mathbb R$.} \newline \noindent {\bf Solution } This is routine. \item {\em For each $i \in I$, define a function $\delta_i \in V$ where $\delta_i(x) = \delta_{i,x}$ (Kronecker delta). Thus $\delta_i(i) = 1$ and $\delta_i(x) = 0$ if $x \not = i$. Show that the vectors $\delta_i$ are linearly independent.} \newline \noindent {\bf Solution } Suppose that $\lambda_i$ are scalars (all but finitely many of which are $0$) such that $\sum_i' \lambda_i \delta_i = 0$. Choose any $j \in I$; then $\left(\sum_i' \lambda_i \delta_i\right)(j) = 0$ so $\lambda_j = 0$. Thus these maps are linearly independent. \item {\em Let $W = \langle \delta_i : i \in I \rangle$ be the span of all the $\delta_i$. 
Show that the vectors $\delta_i$ form a basis of $W$ (in that they are a linearly independent spanning set for $W$).} \newline \noindent {\bf Solution } Well they are linearly independent by the previous answer, and of course they span $W$ by the design of $W$. \item {\em Show that $W = V$ if and only if $I$ is finite.} \newline \noindent {\bf Solution } $W$ is the subset of $V$ consisting of functions of finite support, i.e. functions which take non-zero values at only finitely many elements of the domain. The two subsets coincide if and only if $I$ is finite. \item {\em Give an explicit example of a vector space with a clearly describable uncountable basis (no set theoretic metaphysics allowed).} \newline \noindent {\bf Solution} Take $I = \mathbb R$ in part (c): then $W$ has the uncountable basis $\{ \delta_i : i \in \mathbb R \}$. \end{enumerate} \end{enumerate} \end{document}