Additional contents of KW23
This commit is contained in:
parent
18afd12ba1
commit
ec25058e87
121
LinAlg2.tex
121
LinAlg2.tex
|
@ -3814,7 +3814,7 @@ Wir haben eine echte Verallgemeinerung.
|
|||
|
||||
\begin{satz}
|
||||
\label{theo:3.6.3}
|
||||
Seien $V, W$ endlich dimensionale euklidische/unitäre Vektorräume, \\
|
||||
Seien $V, W$ endlich dimensional euklidische/unitäre Vektorräume, \\
|
||||
$\alpha \in \Hom(V, W)$. Dann gilt:
|
||||
\[
|
||||
\alpha^+ \text{ ist Pseudoinverse} \iff \begin{aligned}
|
||||
|
@ -3834,26 +3834,31 @@ Wir haben eine echte Verallgemeinerung.
|
|||
U^T \left( \begin{smallmatrix} 1 \\ & \ddots \\ & & 1 \\ & & & 0 \\ & & & & \ddots \\
|
||||
& & & & & 0 \end{smallmatrix} \right) U
|
||||
\]
|
||||
$A^+ A$ gleich.
|
||||
$A^+ A$.
|
||||
\[
|
||||
A A^+ A = U^T \Sigma \underbrace{V V^T}_I \Sigma^+ \underbrace{U U^T}_I \Sigma V
|
||||
= U^T \underbrace{\Sigma \Sigma^+ \Sigma}_\Sigma V = U^T \Sigma V = A
|
||||
\]
|
||||
\item[$\impliedby$:]
|
||||
\begin{itemize}
|
||||
\item $\ker(\alpha) = \ker(\alpha^+ \circ \alpha), \im(\alpha) = \im(\alpha \circ \alpha^+),
|
||||
\ker(\alpha^+) = \ker(\alpha \circ \alpha^+), \im(\alpha^+) = \im(\alpha^+ \circ \alpha)$ \\
|
||||
\tl UE\br $\ker(\alpha) \subseteq \ker(\alpha^+ \circ \alpha) \subseteq \ker(\alpha \circ \alpha^+
|
||||
\circ \alpha) = \ker(\alpha) \implies \ker(\alpha) = \ker(\alpha \circ \alpha^+ \circ \alpha)$
|
||||
\item $\nu := \alpha^+ \circ \alpha$ ist Orthogonalprojektion auf $\ker(\alpha)^\bot$
|
||||
\item $\nu$ selbstadjungiert $\implies \ker(\nu) \bot \im(\nu)$
|
||||
\item $\nu \circ \nu = \alpha^+ \circ \underbrace{\alpha \circ \alpha^+ \circ \alpha}_\alpha
|
||||
= \alpha^+ \circ \alpha = \nu$
|
||||
\item $\forall u = \nu(w) \in \im(\nu), v \in V$: \[
|
||||
\inner{\nu(v) - v}{u} = \inner{\nu(v) - v}{\nu(w)} =
|
||||
\inner{\nu^2(v) - \nu(v)}{w} = \inner{0}{w} = 0
|
||||
\]
|
||||
\end{itemize}
|
||||
\item[$\implies$:] Steht noch aus
|
||||
% \begin{itemize}
|
||||
% \item \begin{equation} \label{eq:3.6.3.1}
|
||||
% \begin{aligned}
|
||||
% \ker(\alpha) = \ker(\alpha^+ \circ \alpha) && \im(\alpha) = \im(\alpha \circ \alpha^+) \\
|
||||
% \ker(\alpha^+) = \ker(\alpha \circ \alpha^+) && \im(\alpha^+) = \im(\alpha^+ \circ \alpha)
|
||||
% \end{aligned}
|
||||
% \end{equation}
|
||||
% \tl UE\br\,:
|
||||
% $\ker(\alpha) \subseteq \ker(\alpha^+ \circ \alpha) \subseteq \ker(\alpha \circ \alpha^+
|
||||
% \circ \alpha) = \ker(\alpha) \implies \ker(\alpha) = \ker(\alpha \circ \alpha^+ \circ \alpha)$
|
||||
% \item $\nu := \alpha^+ \circ \alpha$ ist Orthogonalprojektion auf $\ker(\alpha)^\bot$
|
||||
% \item $\nu$ selbstadjungiert $\implies \ker(v) \bot \im(v)$
|
||||
% \item $\nu \circ \nu = \alpha^+ \circ \underbrace{\alpha \circ \alpha^+ \circ \alpha}_\alpha
|
||||
% = \alpha^+ \circ \alpha = \nu$
|
||||
% \item $\forall u \in \im(\nu), v \in V$: \[
|
||||
% \inner{\nu(v) - v}u = \inner{\nu(v) - v}{\nu(w)} =
|
||||
% \inner{\nu^2(v) - \nu(v)}{w} = \inner{0}{w} = 0
|
||||
% \]
|
||||
% \end{itemize}
|
||||
\end{itemize}
|
||||
\end{proof}
|
||||
|
||||
|
@ -3909,17 +3914,6 @@ Wird minimal wenn $\lambda_i = \frac{\mu_i}{s_i}, i \in [r]$, insbesondere für
|
|||
$\ontop{\alpha^* \alpha(v) = \alpha^*(w)}{A^* A x = A^* b}$
|
||||
\end{satz}
|
||||
|
||||
\subsubsection{Beispiel (lineare Regression)}
|
||||
\begin{tikzpicture}
|
||||
\end{tikzpicture}
|
||||
$(t_i, y_i)_{i=1}^m$.
|
||||
Suche $f: f(t_i) \sim y_i, \forall i \in [m]$
|
||||
$f(t) = a_0 + a_1 t + a_2 t^2$
|
||||
\[
|
||||
\text{minimiere }
|
||||
\sum_{i=1}^m (f(t_i) - y_i)^2 = \sum_{i=1}^m (a_0 + a_1 t_i + a_2 t_i^2 - y_i)^2 = \norm{A x -b}^2_{\K^m}
|
||||
\]
|
||||
|
||||
\begin{satz}
|
||||
Sei $\alpha \in \Hom(V, W), w \in \im(\alpha)$.
|
||||
Dann gilt mit $v^+ = \alpha^+ (w)$:
|
||||
|
@ -3928,36 +3922,47 @@ $f(t) = a_0 + a_1 t + a_2 t^2$
|
|||
\]
|
||||
\end{satz}
|
||||
|
||||
\subsubsection{Anwendung: Ausgleichsquadrik}
|
||||
Problem: homogenes LGS $Ax=0$. Finde $x$ mit $\norm x = 1$ und $\norm{Ax}$ minimal. \\
|
||||
$b_1, \dots, b_n$ ONB aus EVen von $A^* A$ mit nichtnegativen EWen.
|
||||
\begin{align*}
|
||||
x = \sum \lambda_i b_i \implies \norm{Ax}^2 & = \inner{Ax}{Ax} \\
|
||||
& = \inner{A^* A x}{x} =
|
||||
\inner{\sum s_i \lambda_i b_i}{\sum \lambda_j b_j} = \sum_{i=1}^n s_i \abs{\lambda_i}^2
|
||||
\end{align*}
|
||||
$s_1 \le s_2 \le \dots \le s_n, \norm x^2 = \sum \abs{\lambda_i}^2$
|
||||
\[
|
||||
\frac{\norm{Ax}^2}{\norm x^2} = \frac{\sum s_i \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} \ge
|
||||
\frac{s_1 \sum \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} = s_1
|
||||
\]
|
||||
$\norm x = 1 \implies \norm{Ax} \ge \sqrt{s_1}$
|
||||
$x = b_1 \implies \lambda_1 = 1, \lambda_2 = \dots = \lambda_n = 0 \implies \norm{Ab_1} = \sqrt{s_1} \implies b_1$
|
||||
löst unser Minimierungsproblem. \\
|
||||
$Q = \{(x,y) \in \R^2: \psi(x, y) = 0\}, \psi(x, y):= a_1 x^2 + a_2 xy + a_3 y^2 + a_4 x + a_5 y + a_6$
|
||||
Gegeben: $(x_i,y_i)^m_{i=1}$. Suche $x = (a_1, \dots, a_6)^T$ mit $\norm x = 1$, sodass
|
||||
\[
|
||||
\sum_{i=1}^m (a_1 x_i^2 + a_2 x_i y_i + a_3 y_i^2 + a_4 x_i + a_5 y_i + a_6)^2
|
||||
\]
|
||||
minimal.
|
||||
$=\norm{Ax}^2, A = \begin{pmatrix}\end{pmatrix}$
|
||||
%\subsubsection{Beispiel (lineare Regression)}
|
||||
%\begin{tikzpicture}
|
||||
%\end{tikzpicture}
|
||||
%$(t_i, y_i)_{i=1}^m$.
|
||||
%Suche $f: f(t_i) \sim y_i, \forall i \in [m]$
|
||||
%$f(t) = a_0 + a_1 t + a_2 t^2$
|
||||
%\[
|
||||
% \text{minimiere }
|
||||
% \sum_{i=1}^m (f(t_i) - y_i)^2 = \sum_{i=1}^m (a_0 + a_1 t_i + a_2 t_i^2 - y_i)^2 = \norm{A x -b}^2_{\K^m}
|
||||
%\]
|
||||
|
||||
\begin{satz*}
|
||||
Sei $A \in \K^{m \times n}$ und $b \in \K^n$ Eigenvektor von $A^* A$ zum kleinsten Eigenwert $r_1$.
|
||||
Dann gilt
|
||||
\[
|
||||
\frac{\norm{Ab}}{\norm b} = \min\left\{\frac{\norm{Ax}}{\norm x}: x\in\R^n\right\} = \sqrt{r_1}
|
||||
\]
|
||||
\end{satz*}
|
||||
%\subsubsection{Anwendung: Ausgleichsquadrik}
|
||||
%Problem: homogenes LGS $Ax=0$. Finde $x$ mit $\norm x = 1$ und $\norm{Ax}$ minimal. \\
|
||||
%$b_1, \dots, b_n$ ONB aus EVen von $A^* A$ mit nichtnegativen EWen.
|
||||
%\begin{align*}
|
||||
% X = \sum \lambda_i b_i \implies \norm{Ax}^2 & = \inner{Ax}{Ax} \\
|
||||
% & = \inner{A^* A x}{x} =
|
||||
% \inner{\sum s_i \lambda_i b_i}{\sum \lambda_j b_j} = \sum_{i=1}^n s_i \abs{\lambda_i}^2
|
||||
%\end{align*}
|
||||
%$s_1 \le s_2 \le \dots \le s_n, \norm x = \sum \abs{\lambda_i}^2$
|
||||
%\[
|
||||
% \frac{\norm{Ax}}{\norm x} = \frac{\sum s_i \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} \ge
|
||||
% \frac{s_1 \sum \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} s_1
|
||||
%\]
|
||||
%$\norm x = 1 \implies \norm{Ax} \ge s_1$
|
||||
%$\norm{b_i} \implies \lambda_1, \lambda_2 = \dots = \lambda_n = 0 \implies \norm{Ab_1} = s_1 \implies b_1$
|
||||
%löst unser Minimierungsproblem. \\
|
||||
%$Q = \{(x,y) \in \R^2: \psi(x, y) = 0\}, \psi(x, y):= a_1 x^2 + a_2 xy + a_3 y^2 a_4 x + a_5 y + a_6$
|
||||
%Gegeben: $(x_i,y_i)^m_{i=1}$ Suche $x = (a_1, \dots, a_6)^T$ mit $\norm x = 1$ sodass
|
||||
%\[
|
||||
% \sum_{i=1}^m (a_1 x_i^2 + a_2 x_i y_i + a_3 y_i^2 + a_4 x_i + a_5 y_y + a_6)^2
|
||||
%\]
|
||||
%minimal.
|
||||
%$=\norm{Ax}^2, A = \begin{pmatrix}\end{pmatrix}$
|
||||
%
|
||||
%\begin{satz*}
|
||||
% Sei $A \in \K^{m \times n}$ und $b \in \K^n$ Eigenvektor von $A^* A$ zum kleinsten Eigenwert $r_1$.
|
||||
% Dann gilt
|
||||
% \[
|
||||
% \frac{\norm{Ab}}{\norm b} = \min\left\{\frac{\norm{Ax}}{\norm x}: x\in\R^n\right\} = \sqrt{r_1}
|
||||
% \]
|
||||
%\end{satz*}
|
||||
|
||||
\end{document}
|
||||
|
|
Loading…
Reference in New Issue