\documentclass[12pt]{article} \usepackage{latexsym} \usepackage{amssymb} \title{Algebra 1; MA20008; Sheet 3} \author{{\tt G.C.Smith@bath.ac.uk}} \date{25-x-2004} \begin{document} \maketitle \begin{enumerate} \item {\em Suppose that $V$ is a vector space over $F$, and that $S \subseteq V$. Let $\overline S$ be the intersection of those subspaces of $V$ which contain the subset $S$, or put formally \[ \overline S = \bigcap \{ U \mid U \leq V, S \subseteq U\}.\] Show that $\overline S = \langle S \rangle.$} \newline \noindent {\bf Solution:\ } We have $ S \subseteq \langle S \rangle \leq V$. Now $\langle S \rangle$ is therefore one of the sets being intersected in the definition of $\overline S.$ Therefore $\overline S \subseteq \langle S \rangle.$ Conversely if $S \subseteq U \leq V$ and ${\bf v} \in \langle S\rangle,$ then ${\bf v} \in U$ since $U$ is closed under the formation of linear combinations. Therefore $\langle S \rangle \subseteq \overline S.$ Since we have both inclusions it follows that $\overline S = \langle S \rangle.$ \item {\em Let $V$ be a vector space over $F$ and ${\bf v_1}, {\bf v_2},\ldots, {\bf v_n} \in V$. Suppose that whenever $\theta_1, \ldots, \theta_n, \psi_1, \ldots, \psi_n \in F$ and $\sum_{i=1}^n \theta_i {\bf v_i} = \sum_{i=1}^n \psi_i {\bf v_i}$, then necessarily $\theta_i = \psi_i$ for each $i$, $1 \leq i \leq n$. Show that ${\bf v_1}, {\bf v_2},\ldots, {\bf v_n} \in V$ is linearly independent.} \newline \noindent {\bf Solution:\ } It suffices to choose $\psi_i = 0$ for every $i$: if $\sum_{i=1}^n \theta_i {\bf v_i} = {\bf 0} = \sum_{i=1}^n 0 \cdot {\bf v_i}$, then each $\theta_i = 0$, which is exactly the condition for linear independence. \item {\em Consider $V = \mathbb R$ as a vector space over $\mathbb Q$.} \begin{enumerate} \item {\em Show that $1, \sqrt 2, \sqrt 3$ are linearly independent.} \newline \noindent {\bf Solution:\ } It is first year work to show that $\sqrt 2,\ \sqrt3$ and $\sqrt 6$ are irrational, and we assume that you can do this. Suppose that $a + b \sqrt 2 + c \sqrt 3 = 0$ for rational $a,b$ and $c$. 
Therefore $b \sqrt 2 + c \sqrt 3 = -a \in \mathbb Q$, so $(b \sqrt 2 + c \sqrt 3)^2 = 2b^2 + 3c^2 + 2bc\sqrt 6 \in \mathbb Q$, and hence $bc \sqrt 6 \in \mathbb Q.$ Now $bc =0$ else $\sqrt 6$ would be rational. If only one of $b$ and $c$ were $0$, then $\sqrt 2$ or $\sqrt 3$ would be rational, which it isn't. Therefore $b = c = 0$, so $a=0$ and we are done. \item {\em Let $\alpha = e^{\frac{\pi i}{3}}$. Which lists of the form $1, \alpha, \ldots, \alpha^n$ are linearly independent? Justify your answer.} \newline \noindent {\bf Solution:\ } $\alpha$ is a root of $X^3+1 = (X+1)(X^2-X+1)$ but not of $X+1$ so $\alpha$ is a root of $X^2-X+1$. Thus $1 - \alpha + \alpha^2 = 0$ so $1, \alpha, \alpha^2$ is a linearly dependent list, as is $1, \alpha, \ldots, \alpha^n$ whenever $n \geq 2$. Also $1$ is linearly independent, and $1, \alpha$ is linearly independent, since a non-trivial linear relation would force $\alpha \in \mathbb R$, which is false. \item {\em Suppose that $1, \beta, \beta^2, \ldots, \beta^n$ are linearly independent. Show that $1, (\beta +1), (\beta +1)^2, \ldots, (\beta +1)^n$ are linearly independent.} \newline \noindent {\bf Solution:\ } Suppose, for contradiction, that these powers of $1+\beta$ are linearly dependent. Thus there is a non-zero polynomial $f$ (or $f(X)$) with rational coefficients so that $f(1 + \beta) = 0$. Now $\beta$ will be a root of $h := f(X+1)$, and $\deg h = \deg f$ (and indeed the leading coefficients coincide). Therefore $h$ is not the zero polynomial and the given powers of $\beta$ satisfy a non-trivial linear relation. However, we are given that these powers of $\beta$ are linearly independent over $\mathbb Q,$ so this is absurd. We have the required contradiction. \end{enumerate} \item {\em Suppose that $X$ and $Y$ are both linearly independent subsets of $V$. Does it follow that $X \cap Y$ is linearly independent? What about $X \cup Y$?} \newline \noindent {\bf Solution:\ } A subset of a l.i. set of vectors is l.i. for formal reasons, and $X \cap Y \subseteq X$, so we are done. 
However, the same is not true for the formation of unions. Let $V = F = \mathbb R$. Let $X = \{1\},\ Y = \{2\}$ which are both l.i., but $X \cup Y = \{1,2\}$ which is l.d. because $2 \cdot 1 + (-1) \cdot 2 = 0.$ \item {\em Suppose that $V = U \oplus W$. We are given linearly independent sets of vectors $X \subseteq U$ and $Y \subseteq W$. Is $X \cup Y$ necessarily a linearly independent set of vectors?}\newline \noindent {\bf Solution:\ } We have proved that a direct sum yields uniqueness of decomposition, so if ${\bf v} \in V$ and ${\bf v} = {\bf u_1} + {\bf w_1} = {\bf u_2} + {\bf w_2}$ for ${\bf u_1}, {\bf u_2} \in U$ and ${\bf w_1}, {\bf w_2} \in W$, then ${\bf u_1}= {\bf u_2}$ and ${\bf w_1} = {\bf w_2}$. Now suppose that we have scalars $\lambda_i, \theta_j$ so that \[ \sum_{i=1}^m \lambda_i {\bf u_i} + \sum_{j=1}^n \theta_j {\bf w_j} = {\bf 0}.\] Here ${\bf u_1}, \ldots, {\bf u_m}\in X$ and $ {\bf w_1}, \ldots, {\bf w_n}\in Y$. The uniqueness of expression, compared to ${\bf 0} + {\bf 0} = {\bf 0}$, ensures that both \[ \sum_{i=1}^m \lambda_i {\bf u_i} = {\bf 0}\] and \[ \sum_{j=1}^n \theta_j {\bf w_j} = {\bf 0}.\] The l.i. of both $X$ and $Y$ forces all scalars to vanish, and we are done. \item {\em Suppose that ${\bf v_1}, {\bf v_2},\ldots, {\bf v_n}$ is a linearly independent list of vectors in the vector space $V$. We are given ${\bf w} \in V$. Does it follow that \[{\bf v_1}+ {\bf w}, {\bf v_2}+ {\bf w}, \ldots, {\bf v_n} + {\bf w}\] are linearly independent?}\newline \noindent {\bf Solution:\ } No. Choose $ {\bf w} = -{\bf v_1}$ and the zero vector occurs in the list. \item {\em Suppose that ${\bf v_1}, {\bf v_2},\ldots, {\bf v_n}$ is a linearly dependent list of vectors in the vector space $V$. We are given ${\bf w} \in V$. Does it follow that \[{\bf v_1}+ {\bf w}, {\bf v_2}+ {\bf w}, \ldots, {\bf v_n} + {\bf w}\] is linearly dependent?}\newline \noindent {\bf Solution:\ } No. Let $V = \mathbb R$, $F = \mathbb R$. Let $n=1$ and ${\bf v_1}= {\bf 0}.$ Let ${\bf w}=1$. 
\item {\em Let $V = \mathbb R^3$ viewed as a vector space over $\mathbb R$. Let ${\bf v_1}, \ldots, {\bf v_8}$ be the position vectors of the vertices of a cube.} \begin{enumerate} \item {\em Let \[ A = \left\{ \sum_i \lambda_i {\bf v_i} \mid 0 \leq \lambda_i \leq 1 \mbox{ for all } i, \sum_i \lambda_i = 1\right\}. \] Describe the set $A$, viewed as a collection of position vectors, geometrically.} \newline \noindent {\bf Solution:\ } The given position vectors point to the points inside and on the surface of the cube; that is, $A$ is the solid cube, the convex hull of its vertices. \item {\em Let \[ B = \left\{ \sum_i \lambda_i {\bf v_i} \mid \lambda_i \geq 0 \mbox{ for all } i \right\}. \] Under what circumstances is $B = \mathbb R^3$? Under what circumstances is $B$ a closed half space (i.e. one side of a plane and all the points on that plane)? What other shapes can arise?} \newline \noindent {\bf Solution:\ } We have $B = \mathbb R^3$ exactly when the origin is strictly inside the cube. If the origin is in the interior of a face, then $B$ is a half-space. If the origin is in the interior of an edge, then $B$ is the intersection of two half-spaces defined by perpendicular planes. If the origin is at a vertex of the cube, then $B$ is the intersection of three half-spaces defined by pairwise perpendicular planes (also known as an octant). If the origin is strictly outside the cube, then $B$ will be an infinite cone with finitely many planar faces. \end{enumerate} \end{enumerate} \end{document}