Skip to content

Commit 25acff6

Browse files
committed
week 13v1
1 parent d5df41c commit 25acff6

File tree

5 files changed

+238
-97
lines changed

5 files changed

+238
-97
lines changed

RTH-0010/main.tex

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -500,7 +500,7 @@ \subsection*{Orthogonal Decomposition of $\vec{x}$}
500500

501501
\section*{Practice Problems}
502502
\begin{problem}\label{prob:rref_way}
503-
Retry Example~\ref{fourier} using Gaussian elimination. Which method seems easier to you?
503+
Retry Example~\ref{fourier} using Gaussian elimination. %Which method seems easier to you?
504504
\end{problem}
505505

506506
\begin{problem}\label{prob:vec_eq_0}
@@ -510,27 +510,26 @@ \section*{Practice Problems}
510510
Show that $\vec{v}=\vec{0}$.
511511
\end{problem}
512512

513-
\emph{Problems \ref{OrthoProj1.1}-\ref{OrthoProj1.3}}
514-
513+
\begin{problem}\label{OrthoProj1.1}
515514
Let $\vec{x} = \begin{bmatrix}1\\ -2\\ 1\\ 6\end{bmatrix}$ in $\RR^4$, and let $W = \mbox{span}\left(\begin{bmatrix}2\\ 1\\ 3\\ -4\end{bmatrix}, \begin{bmatrix}1\\ 2\\ 0\\ 1\end{bmatrix}\right)$.
516515

517-
\begin{problem}\label{OrthoProj1.1}
516+
\begin{question}
518517
Compute $\mbox{proj}_W(\vec{x})$.
519518

520519
Answer: $$\frac{1}{10}\begin{bmatrix}\answer{-9}\\\answer{3}\\\answer{-21}\\\answer{33}\end{bmatrix}$$
521-
\end{problem}
520+
\end{question}
522521

523-
\begin{problem}\label{OrthoProj1.2}
522+
\begin{question}
524523
Show that $\left\{\begin{bmatrix}1\\ 0\\ 2\\ -3\end{bmatrix}, \begin{bmatrix}4\\ 7\\ 1\\ 2\end{bmatrix}\right\}$ is another orthogonal basis of $W$.
525-
\end{problem}
524+
\end{question}
526525

527-
\begin{problem}\label{OrthoProj1.3}
528-
Use the basis in Problem \ref{OrthoProj1.2} to compute $\mbox{proj}_W(\vec{x})$.
526+
\begin{question}
527+
Use the basis you found in the previous part to compute $\mbox{proj}_W(\vec{x})$.
529528

530529
Answer: $$\frac{1}{70}\begin{bmatrix}\answer{-63}\\\answer{21}\\\answer{-147}\\\answer{231}\end{bmatrix}$$
530+
\end{question}
531531
\end{problem}
532532

533-
534533
\begin{problem}\label{prob:proofCor}
535534
Prove Corollary \ref{cor:orthProjOntoW}
536535
\end{problem}

RTH-0030/main.tex

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ \subsection*{Best Approximate Solutions}
9292

9393
What we observed above holds in general. We will use this fact to find $\vec{z}$.
9494

95-
Every vector in $\text{col}(A)$ can be written in the form $A\vec{x}$ for some $\vec{x}$ in $\RR^m$. Our goal is to find $\vec{z}$ such that $A\vec{z}$ is the orthogonal projection of $\vec{b}$ onto $\text{col}(A)$. By Corollary \ref{cor:orthProjOntoW}, every vector $A\vec{x}$ in $\text{col}(A)$ is orthogonal to $\vec{b}-A\vec{z}$. This means $\vec{b}-A\vec{z}$ is in the orthogonal complement of $\text{col}(A)$, which is $\text{null}(A^T)$.
95+
Every vector in $\text{col}(A)$ can be written in the form $A\vec{x}$ for some $\vec{x}$ in $\RR^m$. Our goal is to find $\vec{z}$ such that $A\vec{z}$ is the orthogonal projection of $\vec{b}$ onto $\text{col}(A)$. By Corollary \ref{cor:orthProjOntoW}, every vector $A\vec{x}$ in $\text{col}(A)$ is orthogonal to $\vec{b}-A\vec{z}$. %This means $\vec{b}-A\vec{z}$ is in the orthogonal complement of $\text{col}(A)$, which is $\text{null}(A^T)$.
9696

9797
Therefore, we have
9898
%{\begin{eqnarray*}
@@ -418,11 +418,9 @@ \section*{Practice Problems}
418418
$$x=\answer{\frac{-20}{12}},\quad y=\answer{\frac{46}{12}},\quad z=\answer{\frac{95}{12}}$$
419419
\end{problem}
420420

421-
\emph{Problems \ref{prob:leastSq2a}-\ref{prob:leastSq2b}}
422-
423-
Find a linear function of best fit for each of the following sets of data points. Examine how well your line fits the points by typing the equation of the line into the Desmos window.
424421

425422
\begin{problem}\label{prob:leastSq2a}
423+
Find a linear function of best fit for the given set of data points. Examine how well your line fits the points by typing the equation of the line into the Desmos window.
426424
$$(2,4), (4,3), (7,2), (8,1)$$
427425
Enter your answers in fraction form.
428426
$$f(x)=\answer{\frac{-6}{13}}x+\answer{\frac{64}{13}}$$
@@ -434,6 +432,7 @@ \section*{Practice Problems}
434432
\end{problem}
435433

436434
\begin{problem}\label{prob:leastSq2b}
435+
Find a linear function of best fit for the given set of data points. Examine how well your line fits the points by typing the equation of the line into the Desmos window.
437436
$$(-2, 3), (-1,1), (0,0), (1, -2), (2, -4)$$
438437

439438
$$f(x)=\answer{-1.7}x+\answer{-0.4}$$
@@ -449,21 +448,21 @@ \section*{Practice Problems}
449448
$$(-2,1),(0,0),(3,2),(4,3)$$
450449
Round your answers to three decimal places.
451450
$$f(x)=\answer{0.194}x^2+\answer{-0.024}x+\answer{0.127}$$
452-
For more information about doing least squares with Octave, please see \href{https://ximera.osu.edu/linearalgebradzv3/xOctave/OCT_orth/main}{Octave for Chapter 9}.
451+
For more information about doing least squares with Octave, please see \href{https://ximera.osu.edu/corela/xOctave/OCT_orth/main}{Octave for Chapter 9}.
453452
\end{problem}
454453

455-
\begin{problem}\label{ex:5_6_14}
456-
If $A$ is an $m \times n$ matrix, it can be proved that there exists a unique $n \times m$ matrix $A^{\#}$ satisfying the following four conditions: $AA^{\#}A = A$; $A^{\#}AA^{\#} = A^{\#}$; $AA^{\#}$ and $A^{\#}A$ are symmetric. The matrix $A^{\#}$ is called the \emph{Moore-Penrose} inverse.
454+
% \begin{problem}\label{ex:5_6_14}
455+
% If $A$ is an $m \times n$ matrix, it can be proved that there exists a unique $n \times m$ matrix $A^{\#}$ satisfying the following four conditions: $AA^{\#}A = A$; $A^{\#}AA^{\#} = A^{\#}$; $AA^{\#}$ and $A^{\#}A$ are symmetric. The matrix $A^{\#}$ is called the \emph{Moore-Penrose} inverse.
457456

458-
\begin{enumerate}
459-
\item If $A$ is square and invertible, show that $A^{\#} = A^{-1}$.
457+
% \begin{enumerate}
458+
% \item If $A$ is square and invertible, show that $A^{\#} = A^{-1}$.
460459

461-
\item If $\text{rank} A = m$, show that $A^{\#} = A^{T}(AA^{T})^{-1}$.
460+
% \item If $\text{rank} A = m$, show that $A^{\#} = A^{T}(AA^{T})^{-1}$.
462461

463-
\item If $\text{rank} A = n$, show that $A^{\#} = (A^{T}A)^{-1}A^{T}$. (Notice the appearance of the Moore-Penrose inverse arrived when we solve the normal equations, arriving at Equation (\ref{eq:leastSquaresZ})).
462+
% \item If $\text{rank} A = n$, show that $A^{\#} = (A^{T}A)^{-1}A^{T}$. (Notice the appearance of the Moore-Penrose inverse arrived when we solve the normal equations, arriving at Equation (\ref{eq:leastSquaresZ})).
464463

465-
\end{enumerate}
466-
\end{problem}
464+
% \end{enumerate}
465+
% \end{problem}
467466

468467

469468
% In many scientific investigations, data are collected that relate two variables. For example, if $x$ is the

RTH-0035/main.tex

Lines changed: 85 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ \section*{Orthogonal Matrices and Symmetric Matrices}
2121
As we have seen, the nice bases of $\RR^n$ are the orthogonal ones, so a natural question is: which $n \times n$ matrices have $n$ orthogonal eigenvectors, so that the columns of $P$ form an orthogonal basis for $\RR^n$? These turn out to be precisely the \dfn{symmetric matrices} (matrices for which $A=A^T$), and this is the main result of this section.
2222

2323
\section*{Orthogonal Matrices}
24-
Recall that an orthogonal set of vectors is called \dfn{orthonormal} if $\norm{\vec{q}} = 1$ for each vector $\vec{q}$ in the set, and that any orthogonal set $\{\vec{v}_{1}, \vec{v}_{2}, \dots, \vec{v}_{k}\}$ can be ``\textit{normalized}'', i.e. converted into an orthonormal set $\left\{ \frac{1}{\norm{\vec{v}_{1}}}\vec{v}_{1}, \frac{1}{\norm{\vec{v}_{2}}}\vec{v}_{2}, \dots, \frac{1}{\norm{\vec{v}_{k}}}\vec{v}_{k} \right\}$. In particular, if a matrix $A$ has $n$ orthogonal eigenvectors, they can (by normalizing) be taken to be orthonormal. The corresponding diagonalizing matrix (we will use $Q$ instead of $P$) has orthonormal columns, and such matrices are very easy to invert.
24+
A collection of non-zero, pairwise orthogonal vectors in $\RR^n$ is called an \dfn{orthogonal} set of vectors. An orthogonal set of vectors is called \dfn{orthonormal} if $\norm{\vec{q}} = 1$ for each vector $\vec{q}$ in the set. A set of orthogonal vectors $\{\vec{v}_{1}, \vec{v}_{2}, \dots, \vec{v}_{k}\}$ can be ``\textit{normalized}'', i.e. converted into an orthonormal set $\left\{ \frac{1}{\norm{\vec{v}_{1}}}\vec{v}_{1}, \frac{1}{\norm{\vec{v}_{2}}}\vec{v}_{2}, \dots, \frac{1}{\norm{\vec{v}_{k}}}\vec{v}_{k} \right\}$. In particular, if a matrix $A$ has $n$ orthogonal eigenvectors, they can (by normalizing) be taken to be orthonormal. The corresponding diagonalizing matrix (we will use $Q$ instead of $P$) has orthonormal columns, and such matrices are very easy to invert.
2525

2626

2727
\begin{theorem}\label{th:orthogonal_matrices}
@@ -263,7 +263,7 @@ \section*{Symmetric Matrices}
263263
\end{proof}
264264

265265

266-
Because the eigenvalues of a real symmetric matrix are real, Theorem~\ref{th:PrinAxes} is also called the \dfn{Real Spectral Theorem}, and the set of distinct eigenvalues is called the \dfn{spectrum} of the matrix. A similar result holds for matrices with complex entries (Theorem \ref{th:025890}).
266+
Because the eigenvalues of a real symmetric matrix are real, Theorem~\ref{th:PrinAxes} is called the \dfn{Real Spectral Theorem}, and the set of distinct eigenvalues is called the \dfn{spectrum} of the matrix. A similar result holds for matrices with complex entries (Theorem \ref{th:025890}).
267267

268268

269269
\begin{example}\label{ex:DiagonalizeSymmetricMatrix}
@@ -274,13 +274,13 @@ \section*{Symmetric Matrices}
274274
\end{bmatrix}$.
275275

276276
\begin{explanation}
277-
The characteristic polynomial of $A$ is (adding twice row 1 to row 2):
277+
The characteristic equation of $A$ is (adding twice row 1 to row 2):
278278
\begin{equation*}
279-
c_{A}(z) = \det \begin{bmatrix}
280-
z - 1 & 0 & 1 \\
281-
0 & z - 1 & -2 \\
282-
1 & -2 & z - 5
283-
\end{bmatrix} = z(z - 1)(z - 6)
279+
\det \begin{bmatrix}
280+
1-\lambda & 0 & 1 \\
281+
0 & 1-\lambda & -2 \\
282+
1 & -2 & 5-\lambda
283+
\end{bmatrix} = \lambda(\lambda - 1)(\lambda - 6)=0
284284
\end{equation*}
285285
Thus the eigenvalues are $\lambda = 0$, $1$, and $6$, and corresponding eigenvectors are
286286
\begin{equation*}
@@ -371,13 +371,13 @@ \section*{Symmetric Matrices}
371371

372372

373373
\begin{explanation}
374-
The characteristic polynomial is
374+
The characteristic equation is
375375
\begin{equation*}
376-
c_{A}(z) = \det \begin{bmatrix}
377-
z-8 & 2 & -2 \\
378-
2 & z-5 & -4 \\
379-
-2 & -4 & z-5
380-
\end{bmatrix} = z(z-9)^2
376+
\det \begin{bmatrix}
377+
8-\lambda & 2 & -2 \\
378+
2 & 5-\lambda & -4 \\
379+
-2 & -4 & 5-\lambda
380+
\end{bmatrix} = \lambda(\lambda-9)^2=0
381381
\end{equation*}
382382
Hence the distinct eigenvalues $0$ and $9$ are of algebraic multiplicity $1$ and $2$, respectively. The geometric multiplicities must be the same, for $A$ is diagonalizable, being symmetric. It follows that $\mbox{dim}(\mathcal{S}_0) = 1$ and $\mbox{dim}(\mathcal{S}_9) = 2$. Gaussian elimination gives
383383
\begin{equation*}
@@ -395,7 +395,7 @@ \section*{Symmetric Matrices}
395395
1
396396
\end{bmatrix} \right\rbrace
397397
\end{equation*}
398-
The eigenvectors in $\mathcal{S}_{9}$ are both orthogonal to $\vec{x}_{1}$ as Theorem~\ref{th:symmetric_has_ortho_ev} guarantees, but not to each other. However, the Gram-Schmidt process yields an orthogonal basis
398+
The eigenvectors in $\mathcal{S}_{9}$ are both orthogonal to $\vec{x}_{1}$ as Theorem~\ref{th:symmetric_has_ortho_ev} guarantees, but not to each other. However, an orthogonal basis can be found using projections. (See \href{https://ximera.osu.edu/corela/LinearAlgebraInteractiveIntro/RTH-0015/main}{Gram-Schmidt Orthogonalization}.)
399399
\begin{equation*}
400400
\{\vec{f}_{2}, \vec{f}_{3}\} \mbox{ of } \mathcal{S}_{9}(A) \quad \mbox{ where } \quad \vec{f}_{2} = \begin{bmatrix}
401401
-2 \\
@@ -493,58 +493,66 @@ \section*{Symmetric Matrices}
493493

494494
\section*{Practice Problems}
495495

496-
\begin{problem}\label{prob:ortho_diag_implies_symmetric}
497-
Suppose $A$ is orthogonally diagonalizable. Prove that $A$ is symmetric. (This is the easy direction of the "if and only if" in Theorem \ref{th:PrinAxes}.)
498-
\end{problem}
499-
500-
\emph{Problems \ref{prob:make_ortho_matrix3}-\ref{prob:make_ortho_matrix7}}
501-
502-
Normalize the rows to make each of the following matrices orthogonal.
503-
504-
505496
\begin{problem}\label{prob:make_ortho_matrix3}
497+
Normalize the columns to make the following matrix orthogonal.
506498
$A = \begin{bmatrix}
507-
1 & 2 \\
508-
-4 & 2
499+
1 & -4 \\
500+
2 & 2
509501
\end{bmatrix}$
502+
503+
$$\frac{1}{\sqrt{\answer{5}}}\begin{bmatrix}\answer{1} & \answer{-2}\\\answer{2} & \answer{1}\end{bmatrix}$$
510504
\end{problem}
511505

512506
\begin{problem}\label{prob:make_ortho_matrix7}
507+
Normalize the columns to make the following matrix orthogonal.
513508
$A = \begin{bmatrix}
514509
-1 & 2 & 2 \\
515510
2 & -1 & 2 \\
516511
2 & 2 & -1
517512
\end{bmatrix}$
513+
$$\frac{1}{\answer{3}}\begin{bmatrix}\answer{-1} & \answer{2} & \answer{2} \\
514+
\answer{2} & \answer{-1} & \answer{2} \\
515+
\answer{2} & \answer{2} & \answer{-1}
516+
\end{bmatrix}$$
518517
\end{problem}
519518

520519
\begin{problem}\label{prob:findQ}
521520
For each matrix $A$, find an orthogonal matrix $Q$ such that $Q^{-1}AQ$ is diagonal.
522521

523-
\begin{enumerate}
524-
\item\label{prob:findQa} $A = \begin{bmatrix}
522+
\begin{question}
523+
$$A = \begin{bmatrix}
525524
0 & 1 \\
526525
1 & 0
527-
\end{bmatrix}$
526+
\end{bmatrix}$$
527+
$$Q=\frac{1}{\sqrt{\answer{2}}}\begin{bmatrix}1 & -1\\\answer{1} & \answer{1}\end{bmatrix}$$
528+
\end{question}
528529

529-
\item\label{prob:findQd} $A = \begin{bmatrix}
530+
\begin{question}
531+
$$A = \begin{bmatrix}
530532
3 & 0 & 7 \\
531533
0 & 5 & 0 \\
532534
7 & 0 & 3
533-
\end{bmatrix}$
534-
535-
\item\label{prob:findQe} $A = \begin{bmatrix}
536-
1 & 1 & 0 \\
537-
1 & 1 & 0 \\
538-
0 & 0 & 2
539-
\end{bmatrix}$
535+
\end{bmatrix}$$
536+
List eigenvalues in increasing order.
537+
$$\lambda_1=\answer{-4},\quad\lambda_2=\answer{5},\quad\lambda_3=\answer{10}$$
538+
$$Q^{-1}AQ=\begin{bmatrix}\answer{-1}/\sqrt{\answer{2}} & \answer{0} & \answer{1}/\sqrt{\answer{2}}\\\answer{0} & \answer{1} & \answer{0}\\1/\sqrt{\answer{2}} & \answer{0} & 1/\sqrt{\answer{2}}\end{bmatrix}\begin{bmatrix}\lambda_1 & 0 & 0\\0 & \lambda_2 & 0\\0 & 0& \lambda_3\end{bmatrix}\begin{bmatrix}\answer{-1}/\sqrt{\answer{2}} & \answer{0} & \answer{1}/\sqrt{\answer{2}}\\\answer{0} & \answer{1} & \answer{0}\\1/\sqrt{\answer{2}} & \answer{0} & 1/\sqrt{\answer{2}}\end{bmatrix}$$
539+
\end{question}
540+
541+
% \begin{question}
542+
% $$A = \begin{bmatrix}
543+
% 1 & 1 & 0 \\
544+
% 1 & 1 & 0 \\
545+
% 0 & 0 & 2
546+
% \end{bmatrix}$$
547+
% \end{question}
548+
549+
% (challenging problem) $A = \begin{bmatrix}
550+
% 3 & 5 & -1 & 1 \\
551+
% 5 & 3 & 1 & -1 \\
552+
% -1 & 1 & 3 & 5 \\
553+
% 1 & -1 & 5 & 3
554+
% \end{bmatrix}$
540555

541-
\item\label{prob:findQg} (challenging problem) $A = \begin{bmatrix}
542-
3 & 5 & -1 & 1 \\
543-
5 & 3 & 1 & -1 \\
544-
-1 & 1 & 3 & 5 \\
545-
1 & -1 & 5 & 3
546-
\end{bmatrix}$
547-
\end{enumerate}
548556

549557
%\begin{sol}
550558
%\begin{enumerate}
@@ -584,6 +592,9 @@ \section*{Practice Problems}
584592
%\end{sol}
585593
\end{problem}
586594

595+
\begin{problem}\label{prob:ortho_diag_implies_symmetric}
596+
Suppose $A$ is orthogonally diagonalizable. Prove that $A$ is symmetric. (This is the easy direction of the ``if and only if'' in Theorem \ref{th:PrinAxes}.)
597+
\end{problem}
587598

588599
\begin{problem}\label{ex:8_2_15}
589600
Prove the converse of Theorem~\ref{th:dotpSymmetric}:
@@ -605,16 +616,16 @@ \section*{Practice Problems}
605616

606617
\item Give $2 \times 2$ examples of $Q$ such that $\det Q = 1$ and $\det Q = -1$.
607618

608-
\item If $\det Q = -1$, show that $I + Q$ has no inverse.
609-
\begin{hint}
610-
$Q^{T}(I + Q) = (I + Q)^{T}$.
611-
\end{hint}
619+
% \item If $\det Q = -1$, show that $I + Q$ has no inverse.
620+
% \begin{hint}
621+
% $Q^{T}(I + Q) = (I + Q)^{T}$.
622+
% \end{hint}
612623

613-
\item If $P$ is $n \times n$ and $\det P \neq (-1)^{n}$, show that $I - P$ has no inverse.
624+
% \item If $P$ is $n \times n$ and $\det P \neq (-1)^{n}$, show that $I - P$ has no inverse.
614625

615-
\begin{hint}
616-
$P^{T}(I - P) = -(I - P)^{T}$
617-
\end{hint}
626+
% \begin{hint}
627+
% $P^{T}(I - P) = -(I - P)^{T}$
628+
% \end{hint}
618629
\end{enumerate}
619630
%\begin{sol}
620631
%\begin{enumerate}
@@ -638,41 +649,41 @@ \section*{Practice Problems}
638649
\end{problem}
639650

640651

641-
\begin{problem}\label{prob:ortho21}
642-
A matrix that we obtain from the identity matrix by writing its rows in a different order is called a \dfn{permutation matrix} (see Theorem \ref{th:LUPA}). Show that every permutation matrix is orthogonal.
643-
\end{problem}
652+
% \begin{problem}\label{prob:ortho21}
653+
% A matrix that we obtain from the identity matrix by writing its rows in a different order is called a \dfn{permutation matrix} (see Theorem \ref{th:LUPA}). Show that every permutation matrix is orthogonal.
654+
% \end{problem}
644655

645656

646-
\begin{problem}\label{prob:ortho25}
647-
Show that the following are equivalent for an $n \times n$ matrix $Q$.
657+
% \begin{problem}\label{prob:ortho25}
658+
% Show that the following are equivalent for an $n \times n$ matrix $Q$.
648659

649660

650-
\begin{enumerate}
651-
\item $Q$ is orthogonal.
661+
% \begin{enumerate}
662+
% \item $Q$ is orthogonal.
652663

653-
\item $\norm{Q\vec{x}} = \norm{\vec{x}}$ for all $\vec{x}\in\RR^n$.
664+
% \item $\norm{Q\vec{x}} = \norm{\vec{x}}$ for all $\vec{x}\in\RR^n$.
654665

655-
\item $\norm{ Q\vec{x} - Q\vec{y}} = \norm{\vec{x} - \vec{y}}$ for all $\vec{x}$, $\vec{y}\in \RR^n$.
666+
% \item $\norm{ Q\vec{x} - Q\vec{y}} = \norm{\vec{x} - \vec{y}}$ for all $\vec{x}$, $\vec{y}\in \RR^n$.
656667

657-
\item $(Q\vec{x}) \dotp (Q\vec{y}) = \vec{x} \dotp \vec{y}$ for all columns $\vec{x}$, $\vec{y}\in\RR^n$.
668+
% \item $(Q\vec{x}) \dotp (Q\vec{y}) = \vec{x} \dotp \vec{y}$ for all columns $\vec{x}$, $\vec{y}\in\RR^n$.
658669

659-
\begin{hint}
660-
%For (c) $\Rightarrow$ (d), see Exercise \ref{ex:5_3_14}(a).
661-
For (d) $\Rightarrow$ (a), show that column $i$ of $Q$ equals $Q\vec{e}_{i}$, where $\vec{e}_{i}$ is column $i$ of the identity matrix.
662-
\end{hint}
663-
\end{enumerate}
670+
% \begin{hint}
671+
% %For (c) $\Rightarrow$ (d), see Exercise \ref{ex:5_3_14}(a).
672+
% For (d) $\Rightarrow$ (a), show that column $i$ of $Q$ equals $Q\vec{e}_{i}$, where $\vec{e}_{i}$ is column $i$ of the identity matrix.
673+
% \end{hint}
674+
% \end{enumerate}
664675

665-
\begin{remark}
666-
This exercise shows that linear transformations with orthogonal standard matrices are distance-preserving (b,c) and angle-preserving (d).
667-
\end{remark}
676+
% \begin{remark}
677+
% This exercise shows that linear transformations with orthogonal standard matrices are distance-preserving (b,c) and angle-preserving (d).
678+
% \end{remark}
668679

669-
\end{problem}
680+
% \end{problem}
670681

671682

672683

673684

674-
\begin{problem}\label{prob:SchurChallenge}
675-
Modify the proof of Theorem~\ref{th:PrinAxes} to prove Theorem \ref{th:Schur}.
685+
% \begin{problem}\label{prob:SchurChallenge}
686+
% Modify the proof of Theorem~\ref{th:PrinAxes} to prove Theorem \ref{th:Schur}.
676687
%If $A\vec{x}_{1} = \lambda_{1}\vec{x}_{1}$ where $\norm{\vec{x}_{1}} = 1$, let $\{\vec{x}_{1}, \vec{x}_{2}, \dots, \vec{x}_{n}\}$ be an orthonormal basis of $\RR^n$, and let $P_{1} = \begin{bmatrix}
677688
%\vec{x}_{1} & \vec{x}_{2} & \cdots & \vec{x}_{n}
678689
%\end{bmatrix}$. Then $P_{1}$ is orthogonal and $P_{1}^TAP_{1} = \begin{bmatrix}
@@ -686,7 +697,7 @@ \section*{Practice Problems}
686697
%0 & T_{1}
687698
%\end{bmatrix}$
688699
% is upper triangular.
689-
\end{problem}
700+
% \end{problem}
690701

691702
\section*{Text Source} This section was adapted from Section 8.2 of Keith Nicholson's \href{https://open.umn.edu/opentextbooks/textbooks/linear-algebra-with-applications}{\it Linear Algebra with Applications}. (CC-BY-NC-SA)
692703

0 commit comments

Comments
 (0)