\documentclass[12pt, a4paper]{report}

\PassOptionsToPackage{dvipsnames}{xcolor}
\usepackage{tikz}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{enumitem}
\usepackage{amssymb}
\usepackage{marvosym}
\usepackage{mathtools}
\usepackage{cancel}
\usepackage[ngerman]{babel}
\usepackage{harpoon}
\usetikzlibrary{tikzmark,calc,arrows,arrows.meta,angles,math,decorations.markings}
\usepackage{pgfplots}
\usepackage{framed}
\usepackage[hyperref,amsmath,thmmarks,thref,framed]{ntheorem}
\usepackage[colorlinks=true, linkcolor=magenta]{hyperref}
\usepackage{tcolorbox}
\usepackage{geometry}
\geometry{a4paper, top=35mm, left=25mm, right=25mm, bottom=30mm}

\pgfplotsset{compat=1.17}
\title{Lineare Algebra 2}
\date{Sommersemester 2022}
\author{Philipp Grohs \\ \small \LaTeX-Satz: Anton Mosich}

\newcounter{textbox}
\def\tl{\stepcounter{textbox}\tikzmarknode{a\thetextbox}{\strut}}
\def\br{\tikzmarknode{b\thetextbox}{\strut}
\begin{tikzpicture}[overlay, remember picture]
\draw ($(a\thetextbox.north west)+(-0.4\arraycolsep,0ex)$) rectangle
($(b\thetextbox.south east)+(0.2\arraycolsep,0ex)$);
\end{tikzpicture}
}
% https://tex.stackexchange.com/questions/481978/how-to-write-the-block-matrix-in-latex

\newcommand*{\vect}[1]{\overrightharp{\ensuremath{#1}}}
\newcommand\R{\ensuremath{\mathbb{R}}}
\newcommand\C{\ensuremath{\mathbb{C}}}
\newcommand\K{\ensuremath{\mathbb{K}}}
\newcommand\mapsfrom{\rotatebox{180}{$\mapsto$}}

\theoremsymbol{\ensuremath{\square}}
\theorembodyfont{\normalfont}
\theoremheaderfont{\normalfont\it}
\theoremseparator{.}
\newtheorem*{proof}{Beweis}
\qedsymbol{\Lightning}

\definecolor{pastellblau}{HTML}{5BCFFA}
\definecolor{pastellrosa}{HTML}{F5ABB9}
\definecolor{weiss}{HTML}{FFFFFF}

\theoremstyle{break}
\theoremseparator{:\smallskip}
\theoremindent=1em
\theoremheaderfont{\kern-1em\normalfont\bfseries}
\theorembodyfont{\normalfont}
\theoreminframepreskip{0em}
\theoreminframepostskip{0em}
\theoremsymbol{}
\newtcbox{\theoremBox}{colback=pastellrosa!17,colframe=pastellrosa!87,boxsep=0pt,left=7pt,right=7pt,top=7pt,bottom=7pt}
\def\theoremframecommand{\theoremBox}

\newshadedtheorem{theo}{Theorem}[section]
\newshadedtheorem{satz}[theo]{Satz}
\theoremstyle{nonumberbreak}
\newshadedtheorem{nonumbersatz}{Satz}
\theoremstyle{break}
\newshadedtheorem{lemma}[theo]{Lemma}
\newshadedtheorem{korollar}[theo]{Korollar}
\newshadedtheorem{folgerung}[theo]{Folgerung}
\newtcbox{\definBox}{colback=pastellblau!17,colframe=pastellblau!94,boxsep=0pt,left=7pt,right=7pt,top=7pt,bottom=7pt}
\def\theoremframecommand{\definBox}
\newshadedtheorem{defin}[theo]{Definition}

\DeclareMathOperator{\sgn}{sgn}
\DeclareMathOperator{\rg}{rg}
\DeclareMathOperator{\spec}{spec}
\DeclareMathOperator{\spur}{sp}
\DeclareMathOperator{\Hom}{Hom}
\DeclareMathOperator{\adj}{adj}
\DeclareMathOperator{\id}{id}
\DeclareMathOperator{\diag}{diag}
\DeclareMathOperator{\eig}{Eig}
\DeclareMathOperator{\nxn}{n \times n}
\DeclareMathOperator{\im}{im}
\DeclareMathOperator{\GL}{GL}

\newcommand\homkv{\Hom_\K(V, V)}
\newcommand\homk{\Hom_\K}
\newcommand\linspan[1]{\left\langle #1 \right\rangle}
\newcommand\inner[2]{\left\langle #1, #2 \right\rangle}
\newcommand\norm[1]{\left\lVert #1 \right\rVert}
\newcommand\ontop[2]{\genfrac{}{}{0pt}{0}{#1}{#2}}
\newcommand\abs[1]{\left\lvert #1 \right\rvert}
\newcommand\real{\mathfrak{Re}}

\newif\ifhideproofs
%\hideproofstrue

\ifhideproofs
\usepackage{environ}
\NewEnviron{hide}{}
\let\proof\hide
\let\endproof\endhide
\fi

\begin{document}

\tikzset{%
-||-/.style={decoration={markings,
mark=at position 0.5 with {\draw[thick, -] (-.2,-.2) -- (0, .2);\draw[thick, -] (0, -.2) -- (.2, .2);}},
postaction={decorate}},
}

\begin{titlepage}
\begin{tikzpicture}[remember picture, overlay]
% Trans pride flag
\foreach[count=\i] \col in {pastellblau,pastellrosa,weiss,pastellrosa,pastellblau}
\node (back names) [shape=rectangle,
fill=\col,
minimum width=\paperwidth / 5,
anchor=south west,
minimum height=\paperheight] at ([xshift=(\i - 1)*(\paperwidth / 5)]current page.south west) {};
% The Loss
% Panel 1
\draw[line width=.5mm, {Stealth[scale=1.3]}-] ([xshift = 85pt, yshift = -60pt]current page.north west) -- +(0, -.35\paperheight);
% Panel 2
\draw[line width=.5mm, {Stealth[scale=1.3]}-, -||-] ([xshift = -85pt, yshift = -.1\paperheight - 60pt] current page.north east) --
+(0, -.25\paperheight);
\draw[line width=.5mm, {Stealth[scale=1.3]}-, -||-] ([xshift = -185pt, yshift = -60pt]current page.north east) --
+(0, -.35\paperheight);
% Panel 3
\draw[line width=.5mm, -{Stealth[scale=1.3]}, -||-] ([xshift = 40pt, yshift = 60pt] current page.south west) -- +(0, .35\paperheight);
\draw[line width=.5mm, -{Stealth[scale=1.3]}, -||-] ([xshift = 175pt, yshift = 60pt] current page.south west) -- +(0, .35\paperheight);
% Panel 4
\draw[line width=.5mm, -{Stealth[scale=1.3]}] ([xshift = -175pt, yshift = 60pt] current page.south east) -- +(0, .35\paperheight);
\draw[line width=.5mm, {Stealth[scale=1.3]}-] ([yshift = 120pt, xshift = -25pt] current page.south east) -- +(-.38\paperwidth,0);
\draw[very thick] ([xshift = -175+40pt, yshift = 120]current page.south east)
arc [radius=40pt, start angle=0, end angle=90];
% Title, Author & Date
\node at ([yshift = -.45\paperheight]current page.north) {\Huge{ \textbf{Lineare Algebra 2} }};
\node at ([yshift = -.52\paperheight]current page.north) {\Large{Philipp Grohs}};
\node at ([yshift = -.55\paperheight]current page.north) {\large{\LaTeX-Satz: Anton Mosich}};
\node at ([yshift = -.60\paperheight]current page.north) {\large{Sommersemester 2022}};
\end{tikzpicture}
\end{titlepage}

\tableofcontents

\chapter{Determinanten}

\section{Permutationen}

\begin{defin}
Sei $n \in \mathbb{N} \setminus \{0\}, [n] := \{1, 2, \dots, n\}$. \\
Eine bijektive Abbildung $\pi\colon[n]\to[n]$ heißt \underline{Permutation} von $[n]$.
Wir definieren die \underline{symmetrische Gruppe}
$S_n := \{\pi\text{ Permutation von }[n]\}$
mit der Hintereinanderausführung als Gruppenoperation.
\end{defin}

\subsubsection{Bemerkung}
\begin{itemize}
\item $(S_n, \circ)$ ist eine Gruppe.
\item $\pi\in S_n$ ist eindeutig durch das Tupel $(\pi(1), \dots, \pi(n))$ definiert.
\item Fixpunkte $(\pi(i)=i)$ werden oft weggelassen.
\end{itemize}
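Zum Beispiel bezeichnet das Tupel $(2\,3\,1)\in S_3$ die Permutation $\pi$ mit
$\pi(1)=2$, $\pi(2)=3$ und $\pi(3)=1$; das Tupel $(1\,2\,3)$ entspricht der Identität.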

\begin{defin}
$\pi\in S_n$ heißt \underline{Transposition}, wenn es $i, j\in [n]$ mit $i\neq j$ gibt, sodass
\[\pi(k) =
\begin{cases}
k & k\notin\{i, j\} \\
i & k = j \\
j & k=i
\end{cases}
\]
Wir schreiben $\pi = (ij)$.
\end{defin}

\begin{satz}
\label{theo:1.1.3}
Es gilt $\abs{ S_n } = n!$.
\end{satz}
\begin{proof}
Vollständige Induktion
\begin{itemize}
\item[$n=1$:] $S_1 = \{\id\}\implies\abs{ S_1} = 1 = 1!$
\item[$n-1\to n$:] Angenommen $\abs{ S_{n-1} } = (n-1)!$.
Dann gilt $\abs{\{\pi \in S_n\colon \pi(n) = n \}} = (n-1)!$. Sei allgemein $i \in [n]$.
Dann gilt $\pi(n)=i \iff (in)\circ\pi(n)=n$. Also gilt
\begin{align*}
& \abs{\{\pi\in S_n\colon \pi(n)=i\}} = \abs{\{(in)\circ\pi\colon \pi(n)=n\}} \\
& = \abs{\{\pi\colon \pi(n)=n\}} = (n-1)!
\end{align*}
Weiters gilt
\begin{align*}
& S_n = \bigcup_{i\in[n]}^\bullet\{\pi\in S_n\colon \pi(n)=i\} \implies \\
& \abs{S_n}= \sum_{i\in[n]}\abs{\{\pi \in S_n\colon \pi(n) = i\}}
= n\cdot(n-1)! = n!
\end{align*}
\end{itemize}
\end{proof}
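Zum Beispiel ist $\abs{S_3} = 3! = 6$; in Tupelschreibweise sind die Elemente
$(1\,2\,3), (1\,3\,2), (2\,1\,3), (2\,3\,1), (3\,1\,2), (3\,2\,1)$.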

\begin{satz}
\label{theo:1.1.4}
Für $n\in \mathbb{N}_{\ge2}$ ist jedes $\pi \in S_n$ das Produkt von (endlich vielen) Transpositionen.
\end{satz}
\begin{proof}
Vollständige Induktion
\begin{itemize}
\item[$n=2$:] $S_2 = \{\id, (2 1)\}$
\item[$n-1\to n$:]
Sei $\pi \in S_n$. Dann gilt (siehe Beweis von Satz \ref{theo:1.1.3}) mit $i=\pi(n)$, dass
\[\underbrace{(i n)\pi}_{\pi_i}(n) = n\]
Es ist $\pi_i = (\underbrace{\pi_i(1) \dots \pi_i(n-1)}_{\in S_{n-1}}\, n)$,
nach Induktionsvoraussetzung also $\pi_i = (i_1 j_1) \dots (i_k j_k)$.\\
Außerdem gilt $\pi = (i n)\pi_i$, also $\pi = (i n)(i_1 j_1) \dots (i_k j_k)$
\end{itemize}
\end{proof}

\subsubsection{Bemerkung}
\begin{itemize}
\item Produktdarstellung ist nicht eindeutig, zum Beispiel:
\[
(3 1 2) = (2 1)(3 1) = (3 1)(3 2)
\]
\item $f\in \mathbb{Z}[X_1, \dots, X_n], \pi \in S_n$ \\
$\pi f(X_1, \dots, X_n) := f(X_{\pi(1)}, \dots, X_{\pi(n)})$
\end{itemize}

\subsubsection{Beispiel}
$\pi = (2 3 1), f(X_1, X_2, X_3) = X_1-X_2+X_1X_3 \implies \pi f(X_1, X_2, X_3) = X_2 - X_3 + X_2X_1$

\begin{lemma}
\label{theo:1.1.5}
Sei
\[
f(X_1, \dots, X_n) = \prod_{\substack{i, j\in[n]\\ i < j}} (X_j-X_i)\in \mathbb{Z}[X_1, \dots, X_n]
\]
Dann gilt
\begin{enumerate}[label=\alph*)]
\item Zu jedem $\pi \in S_n$ existiert eine eindeutige Zahl $s(\pi) \in \{-1, 1\}$ mit
$\pi f = s(\pi)f$.
\item Für $\pi$ eine Transposition gilt $s(\pi) = -1$.
\end{enumerate}
\end{lemma}
\begin{proof}
\begin{enumerate}[label=\alph*)]
\item
\begin{align*}
\pi f(X_1, \dots, X_n) & = \prod_{i<j}(X_{\pi(j)}-X_{\pi(i)}) \\
& =\Bigl(\prod_{\substack{i<j \\
\pi(i)<\pi(j)}}
(X_{\pi(j)}-X_{\pi(i)})\Bigr)
\Bigl(\prod_{\substack{i<j \\
\pi(j)<\pi(i)}}(X_{\pi(j)}-X_{\pi(i)})\Bigr) \\
& = (-1)^{\abs{\{(i, j)\in[n]\times[n]\colon i<j\land\pi(i)>\pi(j)\}}}
\prod_{i<j}(X_j-X_i) \\
& = s(\pi)f(X_1, \dots, X_n) \text{ mit } \\
s(\pi) & = (-1)^{\abs{\{(i, j)\in[n]\times[n]\colon i<j\land\pi(i)>\pi(j)\}}}
\end{align*}
\item $\pi = (i j), i<j, k\in\{i+1, \dots, j-1\}\colon
\pi(i, j) = (j, i), \pi(i, k) = (j, k), \pi(k, j) = (k, i)$\\
Für diese Paare gilt $x<y \land \pi(x) > \pi(y)$\\
Für alle anderen Paare gilt $x<y \land \pi(x)<\pi(y)$\\
Erstere sind $2(j-i-1)+1$ Paare. Daraus folgt $\pi f=(-1)^{2(j-i-1)+1}f$, also $s(\pi)=-1$.
\end{enumerate}
\end{proof}

\begin{defin}
\begin{itemize}
\item Die durch Lemma \ref{theo:1.1.5} bestimmte Größe $s(\pi)$ heißt
\underline{Signum} von $\pi \in S_n$. Wir schreiben $\sgn(\pi)$.
\item $\pi$ heißt \underline{gerade} falls $\sgn(\pi)=1$ und \underline{ungerade} falls $\sgn(\pi)=-1$.
\end{itemize}
\end{defin}
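Zum Beispiel gilt für $\pi = (2\,3\,1)\in S_3$: genau für die Paare $(1, 3)$ und $(2, 3)$
gilt $i<j$ und $\pi(i)>\pi(j)$, also $\sgn(\pi) = (-1)^2 = 1$; passend dazu ist
$\pi = (1 2)(2 3)$ ein Produkt von zwei Transpositionen.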

\begin{satz}
\label{theo:1.1.7}
Für $\pi, \sigma \in S_n$ gilt \[\sgn(\sigma\pi)=\sgn(\sigma)\sgn(\pi)\]
\end{satz}
\begin{proof}
Nach Lemma \ref{theo:1.1.5}(a) gilt:
\begin{align*}
& f(X_1, \dots, X_n) = \prod\limits_{i<j}(X_j-X_i) \implies \\
& \sigma\pi f(X_1, \dots, X_n) = \sgn(\sigma\pi)f(X_1, \dots, X_n)
\end{align*}
Andererseits gilt:
\begin{align*}
\sigma\pi f(X_1, \dots, X_n) & = \sigma[\pi f(X_1, \dots, X_n)] \\
& = \sigma[\sgn(\pi)f(X_1, \dots, X_n)] \\
& = \sgn(\pi) \sigma f(X_1, \dots, X_n) \\
& = \sgn(\pi)\sgn(\sigma)f(X_1, \dots, X_n)
\end{align*}
\end{proof}

\begin{satz}
\begin{enumerate}[label=\alph*)]
\item $\sgn(\pi)=1\iff\pi$ ist Produkt einer geraden Anzahl von Transpositionen
\item $\pi$ Produkt von k Transpositionen $\implies \sgn(\pi)=(-1)^k$
\end{enumerate}
\end{satz}
\begin{proof}
Folgt direkt aus Lemma \ref{theo:1.1.5}(b) und Satz \ref{theo:1.1.7}
\end{proof}

\begin{folgerung}
Für $n\ge2$ gibt es genau $\frac12n!$ gerade und $\frac12n!$ ungerade Permutationen in $S_n$.
\end{folgerung}
\begin{proof}
Folgt aus Satz \ref{theo:1.1.3}, da $\pi\mapsto(1 2)\circ\pi$ eine Bijektion zwischen den geraden
und den ungeraden Permutationen in $S_n$ ist.
\end{proof}

\begin{defin}
Die geraden Permutationen bilden eine Untergruppe $A_n$ von $S_n$, die man \\
\underline{alternierende Gruppe} nennt.
\end{defin}
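Zum Beispiel ist $A_3 = \{\id, (2\,3\,1), (3\,1\,2)\}$, in Übereinstimmung mit $\abs{A_3} = \frac12 3! = 3$.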

\section{Multilinearformen}

\begin{defin}
Seien $V_1, \dots, V_n, W$ \K-Vektorräume. Eine Abbildung $\varphi\colon V_1 \times \dots \times V_n \to W$
heißt \underline{n-linear}, wenn für alle
$v_1, v'_1 \in V_1, \dots, v_n, v'_n\in V_n, i \in [n], \lambda\in\K$ gilt, dass
\begin{itemize}
\item $\varphi(v_1, \dots, v_i+v'_i, \dots, v_n)=
\varphi(v_1, \dots, v_i, \dots, v_n)+\varphi(v_1, \dots, v'_i, \dots, v_n)$
\item $\varphi(v_1, \dots, \lambda v_i, \dots, v_n)= \lambda\varphi(v_1, \dots, v_i, \dots, v_n)$.
\end{itemize}
Ist $W=\K$ und $V_1 = \dots = V_n = V$, so heißt $\varphi$ \underline{n-Linearform}. \\
($n=2 \to$ \underline{Bilinearform})
\end{defin}

\subsubsection{Beispiel}
\[
\varphi\colon
\begin{cases}
\K^2\times \K^2 & \to \K \\
\left(
\begin{pmatrix}
a_{11} \\
a_{21}
\end{pmatrix}
,
\begin{pmatrix}
a_{12} \\
a_{22}
\end{pmatrix}
\right)
& \mapsto a_{11}a_{22} - a_{12}a_{21}
\end{cases}
\]

\begin{defin}
\label{theo:1.2.2}
Eine n-Linearform $\varphi$ von $V$ heißt
\begin{itemize}
\item \underline{nicht ausgeartet}, falls
$(a_1, \dots, a_n)\in V\times\dots\times V$ existiert mit \\
$\varphi(a_1, \dots, a_n) \neq 0$.
\item \underline{alternierend}, falls $\varphi(a_1, \dots, a_n)=0$ für $a_1, \dots, a_n$ linear abhängig.
\end{itemize}
\end{defin}

\subsubsection{Bemerkung}
$\varphi$ alternierend und $a_i = a_j$ für $i\neq j \implies \varphi(a_1, \dots, a_n) = 0$.
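Die Bilinearform aus obigem Beispiel ist nicht ausgeartet und alternierend:
es gilt $\varphi(e_1, e_2) = 1 \neq 0$, und für linear abhängige Vektoren
(etwa $v_2 = \mu v_1$) verschwindet $a_{11}a_{22} - a_{12}a_{21}$.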

\begin{lemma}
\label{theo:1.2.3}
Sei $\varphi$ alternierende n-Linearform von V und $\pi \in S_n$. Dann gilt für
$a_1, \dots, a_n\in V$:
\[\varphi\left(a_{\pi(1)}, \dots, a_{\pi(n)}\right)=\sgn(\pi)\varphi(a_1, \dots, a_n)\]
\end{lemma}
\begin{proof}
Wegen Satz \ref{theo:1.1.4} und Satz \ref{theo:1.1.7} genügt es anzunehmen, dass $\pi$ Transposition ist.
Sei also $\pi=(ij)$. Es gilt
\begin{align*}
0 & =\varphi(a_1, \dots, \underbrace{a_i+a_j}_{i}, \dots, \underbrace{a_i+a_j}_{j}, \dots, a_n) \\
& =\underbrace{\varphi(a_1, \dots, a_i, \dots, a_i, \dots, a_n)}_{0} +
\underbrace{\varphi(a_1, \dots, a_j, \dots, a_j, \dots, a_n)}_{0} \\
& \;\; + \varphi(a_1, \dots, a_i, \dots, a_j, \dots, a_n) +
\varphi(a_1, \dots, a_j, \dots, a_i, \dots, a_n) \\
& \implies \varphi(a_1, \dots, a_j, \dots, a_i, \dots, a_n)=
\underbrace{(-1)}_{=\sgn(\pi)}\varphi(a_1, \dots, a_i, \dots, a_j, \dots, a_n)
\end{align*}
\end{proof}

\begin{lemma}
\label{theo:1.2.4}
Sei $V$ ein $\K$-VR mit $\dim(V)=n$ und $\varphi$ nicht ausgeartete und alternierende n-Linearform von V.
Dann gilt
\[
a_1, \dots, a_n \text{ linear abhängig} \iff \varphi(a_1, \dots, a_n) = 0
\]
\end{lemma}
\begin{proof}
\begin{itemize}
\item[$\implies$:] folgt aus Definition \ref{theo:1.2.2}
\item[$\impliedby$:] z.Z.: $\varphi(b_1, \dots, b_n)\neq0\impliedby b_1, \dots, b_n \text{ Basis von } V$.
Da $\varphi$ nicht ausgeartet ist, gibt es $a_1, \dots, a_n\in V$ mit $\varphi(a_1, \dots, a_n)\neq0$.\\
Da $b_1, \dots, b_n$ Basis ist, gibt es $\lambda_{ij}\in\K$ mit $a_i=\sum\limits_{j=1}^n{\lambda_{ij}b_j}$\\
Wegen n-Linearität gilt
\begin{align*}
0\neq\varphi(a_1, \dots, a_n) & =\sum_{j_1=1}^n{\dots}\sum_{j_n=1}^n{\varphi(b_{j_1}, \dots, b_{j_n})
\lambda_{1j_1}\cdots\lambda_{nj_n}} \\
& \underbrace{=}_{\mathclap{\varphi\text{ alternierend}}}
\sum_{\substack{j_1, \dots, j_n \\
\text{paarweise verschieden}}}
{\varphi(b_{j_1}, \dots, b_{j_n})\lambda_{1j_1} \cdots \lambda_{nj_n}} \\
& = \sum_{\pi\in S_n} \varphi(b_{\pi(1)}, \dots, b_{\pi(n)})
\lambda_{1\pi(1)} \cdots \lambda_{n\pi(n)} \\
& \underbrace{=}_{\mathclap{\text{Lemma \ref{theo:1.2.3}}}}
\varphi(b_1, \dots, b_n)\left(\sum_{\pi\in S_n}
\sgn(\pi)\lambda_{1\pi(1)}\cdots\lambda_{n\pi(n)}\right) \\
& \implies\varphi(b_1, \dots, b_n)\neq 0
\end{align*}
\end{itemize}
\end{proof}

\begin{satz}
\label{theo:1.2.5}
Sei V $\K$-VR mit $\dim(V)=n$ und Basis $a_1, \dots, a_n$.
\begin{enumerate}[label=\alph*)]
\item Für $\varphi$ alternierende nicht ausgeartete n-Linearform gilt für\\ $b_i =
\sum\lambda_{ij}a_j$, dass
\[
\varphi(b_1, \dots, b_n) =
\varphi(a_1, \dots, a_n)\left(\sum_{\pi \in S_n}\sgn(\pi)\lambda_{1\pi(1)}\cdots\lambda_{n\pi(n)}\right)
\]
\item Sei $c\in\K\setminus\{0\}$. Dann ist die Abbildung
\[
\varphi(b_1, \dots, b_n) = c\left(\sum_{\pi \in S_n}\sgn(\pi)\lambda_{1\pi(1)}\cdots\lambda_{n\pi(n)}\right)
\]
eine alternierende nicht ausgeartete n-Linearform.
\end{enumerate}
\end{satz}
\begin{proof}
\begin{enumerate}[label=\alph*)]
\item folgt aus dem Beweis von Lemma \ref{theo:1.2.4}.
\item Man verifiziert leicht, dass $\varphi$ n-linear ist. Weiters ist $\varphi$
nicht ausgeartet, da
\[
\varphi(a_1, \ldots, a_n) =
c\left(\sum_{\pi\in S_n}\sgn(\pi)\delta_{1\pi(1)} \cdots \delta_{n\pi(n)}\right) = c \cdot 1 \neq 0
\]
z.Z.: $\varphi$ alternierend. Seien $b_1, \dots, b_n$ linear abhängig.\\
O.B.d.A. $b_1=\mu_2b_2+\cdots+\mu_nb_n$. Dann gilt
\[\varphi(b_1, \dots, b_n) = \sum_{j=2}^{n}\mu_j \varphi(b_j, b_2, \dots, b_n)\]
Es genügt also zu zeigen, dass $\varphi(b_1, \dots, b_n) = 0$ falls $b_1 = b_i,
i\in\{2, \dots, n\}$. Dann gilt aber $\lambda_{1j}=\lambda_{ij} \forall j$.
\begin{align*}
\varphi(b_i, \dots, b_i, \dots, b_n) & = c\cdot\sum_{\pi\in S_n} \sgn(\pi) \lambda_{i\pi(1)}\cdots\lambda_{i\pi(i)}\cdots\lambda_{n\pi(n)} \\
&
\begin{multlined}
=c\cdot \Bigg(\sum_{\pi\in A_n}\sgn(\pi)\lambda_{i\pi(1)}\cdots\lambda_{i\pi(i)}\cdots\lambda_{n\pi(n)} \\
+\sum_{\pi\in A_n}\underbrace{\sgn(\pi\circ(1i))}_{=-\sgn(\pi)}\lambda_{i\pi(i)}\cdots\lambda_{i\pi(1)}\cdots\lambda_{n\pi(n)}\Bigg)
\end{multlined}
\\
&
\begin{multlined}
= c\cdot\sum_{\pi\in A_n}(\sgn(\pi)-\sgn(\pi))
\cdot \lambda_{i\pi(1)} \cdot \\
\cdots\lambda_{i\pi(i)}\cdots\lambda_{n\pi(n)}=0
\end{multlined}
\end{align*}
\end{enumerate}
\end{proof}

\subsubsection{Bemerkung}
Es gibt also zu jedem $\K$-VR V mit $\dim(V)=n$ eine nicht ausgeartete
alternierende n-Linearform.

\begin{satz}
\label{theo:1.2.6}
Sei V $\K$-VR mit $\dim(V)=n$ und $\varphi_1, \varphi_2$ nicht ausgeartete alternierende n-Linearformen.
Dann existiert $c\in\K\setminus\{0\}$ mit $\varphi_2=c\cdot\varphi_1$.
\end{satz}
\begin{proof}
Sei $a_1, \dots, a_n$ Basis von V. Nach Lemma \ref{theo:1.2.4} ist
$\varphi_i(a_1, \dots, a_n)\neq0, i=1, 2$.\\
Sei $c:=\dfrac{\varphi_2(a_1, \dots, a_n)}{\varphi_1(a_1, \dots, a_n)} \in \K\setminus\{0\}$.\\
Sei $b_1, \dots, b_n$ mit $b_i=\sum\lambda_{ij}a_j$.\\
Dann gilt nach Satz \ref{theo:1.2.5}(a), dass für $i=1, 2$
\begin{align*}
& \varphi_i(b_1, \dots, b_n) =
\varphi_i(a_1, \dots, a_n)\underbrace{\sum_{\pi \in S_n}\sgn(\pi)\lambda_{1\pi(1)}\cdots\lambda_{n\pi(n)}}_
{\text{unabhängig von $i$!}} \\
& \implies \frac{\varphi_2(b_1, \dots, b_n)}{\varphi_1(b_1, \dots, b_n)}=
\frac{\varphi_2(a_1, \dots, a_n)}{\varphi_1(a_1, \dots, a_n)}=c
\end{align*}
\end{proof}

\section{Determinanten}

\begin{defin}
Sei $B=(a_1, \dots, a_n)$ Basis des \K-Vektorraums V.
Sei $\varphi$ nicht ausgeartete alternierende n-Linearform und $\alpha \in \homkv$.
Dann ist die \underline{Determinante von $\alpha$} definiert durch \[
\det(\alpha):=\det{}_\K(\alpha)
:=\frac{\varphi(\alpha(a_1), \dots, \alpha(a_n))}{\varphi(a_1, \dots, a_n)}
\]
\end{defin}

\begin{satz}
\label{theo:1.3.2}
$\det(\alpha)$ ist unabhängig von der Wahl der Basis B und der Form $\varphi$.
\end{satz}
\begin{proof}
\leavevmode
\begin{enumerate}[label=\arabic*. Fall:]
\item $\alpha$ nicht bijektiv\\
$\implies \alpha(a_1), \dots, \alpha(a_n) \text{ linear abhängig} \implies \det(\alpha) = 0$,
unabhängig von der Wahl von $B$ und $\varphi$.
\item $\alpha$ bijektiv. Sei $B=(a_1, \dots, a_n)$.

Dann ist auch $\alpha(a_1), \dots, \alpha(a_n)$ Basis und, da $\varphi$ nicht
ausgeartet,
\[\varphi(\alpha(a_1), \dots, \alpha(a_n))\neq0\]

Sei $\varphi_\alpha(b_1, \dots, b_n) := \varphi(\alpha(b_1), \dots,
\alpha(b_n))$. Dann ist $\varphi_\alpha$ alternierend und nicht ausgeartet.
Wegen Satz \ref{theo:1.2.6} folgt, dass $c\in\K\setminus\{0\}$ existiert mit
\begin{equation}
\label{eq:constantphi}
\varphi_\alpha=c\cdot\varphi
\end{equation}
und (durch Einsetzen von $a_1, \dots, a_n$), dass $c=\det(\alpha)$.
Da \ref{eq:constantphi} unabhängig von $B$ ist, ist also $\det(\alpha)$ unabhängig von $B$.

Sei nun $\psi$ eine zweite alternierende, nicht ausgeartete n-Form und
$\psi_\alpha(b_1, \dots, b_n) := \psi(\alpha(b_1), \dots, \alpha(b_n))$. Dann
ist $\psi_\alpha$ alternierend und nicht ausgeartet. Nach Satz \ref{theo:1.2.6}
gibt es $d\in\K\setminus\{0\} \text{ mit } d=\frac\psi\varphi$. Also gilt:
\[
\det(\alpha)=\frac{\varphi_\alpha(a_1, \dots, a_n)}{\varphi(a_1, \dots, a_n)}=
\frac{d\varphi_\alpha(a_1, \dots, a_n)}{d\varphi(a_1, \dots, a_n)}=
\frac{\psi_\alpha(a_1, \dots, a_n)}{\psi(a_1, \dots, a_n)}
\]

also ist $\det(\alpha)$ auch von der n-Form unabhängig.
\end{enumerate}
\end{proof}

\begin{korollar}
\label{theo:1.3.3}
Sei V ein n-dimensionaler \K-Vektorraum. Dann gilt
\begin{enumerate}[label=\alph*)]
\item $\alpha\in \homkv \text{ bijektiv } \iff \det(\alpha)\neq0$
\item $\alpha, \beta \in \homkv \implies \det(\alpha \beta) = \det(\alpha) \det(\beta)$
\item $\det(\id)=1$
\item Ist $\alpha\in \homkv$ invertierbar, so gilt
$\det(\alpha^{-1})=\det(\alpha)^{-1}$.
\end{enumerate}
\end{korollar}
\begin{proof}
Sei $B=(a_1, \dots, a_n)$ Basis und $\varphi$ n-Form mit \[
\det(\alpha) = \frac{\varphi(\alpha(a_1), \dots, \alpha(a_n))}{\varphi(a_1, \dots, a_n)}
\text{[ unabhängig von $B$ und $\varphi$ nach Satz \ref{theo:1.3.2}]}
\]
\begin{enumerate}[label=\alph*)]
\item $\alpha$ bijektiv $\iff \alpha(a_1), \dots, \alpha(a_n) \text{ linear unabhängig}$\\
$\underbrace{\iff}_{\mathclap{\text{Lemma \ref{theo:1.2.4}}}}
\varphi(\alpha(a_1), \dots, \alpha(a_n))\neq0\iff \det(\alpha)\neq0$
\item 2 Fälle:
\begin{enumerate}[label=\arabic*. Fall:]
\item $\alpha$ oder $\beta$ ist nicht bijektiv: o.B.d.A. $\alpha$ nicht bijektiv.\\
$\implies \det(\alpha)=0\implies \det(\alpha)\det(\beta)=0$\\
Weiters folgt, dass $\alpha\beta$ nicht bijektiv, also $\det(\alpha\beta)=0$.
\item $\alpha, \beta$ bijektiv.
Dann ist auch $(\beta(a_1), \dots, \beta(a_n))$ Basis und
\begin{align*}
\det(\alpha\beta) & = \frac{\varphi(\alpha(\beta(a_1)), \dots, \alpha(\beta(a_n)))}
{\varphi(a_1, \dots, a_n)} \\
&
\begin{multlined}
=\frac{\varphi(\alpha(\beta(a_1)), \dots, \alpha(\beta(a_n)))}
{\varphi(\beta(a_1), \dots, \beta(a_n))}\cdot
\frac{\varphi(\beta(a_1), \dots, \beta(a_n))}
{\varphi(a_1, \dots, a_n)}
\end{multlined}
\\
& \underbrace{=}_{\mathclap{\text{Satz \ref{theo:1.3.2}}}}
\det(\alpha)\det(\beta)
\end{align*}
\end{enumerate}
\item $\det(\id)=\frac{\varphi(a_1, \dots, a_n)}{\varphi(a_1, \dots, a_n)}=1$
\item $1\underbrace{=}_{\text{c)}}\det(\id)=\det(\alpha\alpha^{-1})\underbrace{=}_{\text{b)}}
\det(\alpha)\det(\alpha^{-1})$
\end{enumerate}
\end{proof}

\begin{satz}
\label{theo:1.3.4}
Sei $\alpha\in \homkv, B=(b_1, \dots, b_n)$ Basis und $A=(a_{ij}) = {}_B M(\alpha)_B\in\K^{n\times n}$.
Dann gilt
\[\det(\alpha)=\sum_{\pi\in S_n}\sgn(\pi)a_{1\pi(1)}\cdots a_{n\pi(n)}\]
\end{satz}
\begin{proof}
Es gilt $\alpha(b_i)=\sum\limits_{j=1}^na_{ij}b_j \text{ für }i=1, \dots, n$.
Nach Satz \ref{theo:1.2.5}(a) gilt
\[
\varphi(\alpha(b_1), \dots, \alpha(b_n)) =
\varphi(b_1, \dots, b_n)\cdot\sum_{\pi\in S_n}\sgn(\pi)a_{1\pi(1)}\cdots a_{n\pi(n)}
\]
und daraus folgt die Behauptung direkt.
\end{proof}

\begin{defin}
Für $A=(a_{ij})\in\K^{n\times n}$ definieren wir die \underline{Determinante von A} als
\[
\det(A)=\sum_{\pi\in S_n} \sgn(\pi)a_{1\pi(1)}\cdots a_{n\pi(n)}\in\K
\]
\end{defin}

\subsubsection{Bemerkung}
Schreibweise für $A=(a_{ij})$:
\[
\det(A)=
\begin{vmatrix}
a_{11} & \dots & a_{1n} \\
\vdots & \ddots & \vdots \\
a_{n1} & \dots & a_{nn}
\end{vmatrix}
\]
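Für $n=2$ ist zum Beispiel $S_2=\{\id, (1 2)\}$ mit $\sgn(\id)=1$ und $\sgn((1 2))=-1$, also
\[
\begin{vmatrix}
a_{11} & a_{12} \\
a_{21} & a_{22}
\end{vmatrix}
= a_{11}a_{22} - a_{12}a_{21}
\]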

\section{Rechenregeln}
\begin{satz}
\label{theo:1.4.1}
Sei $A=(a_1, \dots, a_n)\in\K^{n\times n}$. Dann gilt
\begin{enumerate}[label=\alph*)]
\item $\det(A)=\det(A^T)$
\item $\forall i, j\in[n]\colon i<j\colon
\det((a_1, \dots, \underbrace{a_j}_{i}, \dots, \underbrace{a_i}_{j}, \dots, a_n))=-\det(A)$
\item $\forall i\in[n]\colon \lambda_1, \dots, \lambda_n\in\K\colon \det((a_1, \dots, a_i+
\sum\limits_{\substack{j=1\\j\neq i}}^n\lambda_ja_j, \dots, a_n))=\det(A)$
\item $\forall i\in[n]\colon \lambda\in\K\colon \det((a_1, \dots, \lambda a_i, \dots, a_n)) = \lambda \det(A)$
\item $\exists i, j\in[n]\colon i\neq j\land a_i=a_j \implies \det(A)=0$
\item $\forall \lambda \in \K\colon \det(\lambda A)=\lambda^n \det(A)$
\item $A$ invertierbar $\implies \det(A^{-1})=\det(A)^{-1}$
\item $\forall B \in \K^{n\times n}\colon \det(AB)=\det(A)\det(B)$
\item $\det(I_n)=1$
\end{enumerate}
\end{satz}
\begin{proof}
Nur a) explizit:
\begin{enumerate}[label=\alph*)]
\item \begin{equation*}
\begin{aligned}
\det(A^T) & = \sum_{\pi\in S_n}\sgn(\pi)a_{\pi(1)1}\cdots a_{\pi(n)n} \\
& =\sum_{\pi\in S_n}\sgn(\pi)a_{1\pi^{-1}(1)}\cdots a_{n\pi^{-1}(n)} \\
& \underbrace{=}_{\substack{\sgn(\pi^{-1})=\sgn(\pi) \\
\pi^{-1}\mapsto\pi}} \sum_{\pi\in S_n} \sgn(\pi)a_{1\pi(1)}\cdots a_{n\pi(n)}
\end{aligned}
\end{equation*}
\item[b) - i)] folgt daraus, dass für \[\alpha\colon
\begin{cases}
\K^n\to\K^n \\
x\mapsto A\cdot x
\end{cases}
\]
$\det(A)=\det(\alpha)$ gilt (Satz \ref{theo:1.3.4}), dass $\varphi$ alternierende n-Form ist,
beziehungsweise aus Korollar \ref{theo:1.3.3}.
\end{enumerate}
\end{proof}
\begin{satz}
Seien $A, B\in\K^{n\times n}$ ähnlich, das heißt $\exists P\in\K^{n\times n}$
invertierbar mit \\ $B=P^{-1}\cdot A\cdot P$. Dann gilt
\[\det(A)=\det(B)\]
Weiters ist A genau dann invertierbar wenn $\det(A)\neq0$.
\end{satz}
\begin{proof}
\[\det(B)=\det(P)\underbrace{\det(P^{-1})}_{=\det(P)^{-1}}\det(A)=\det(A)\]
Rest folgt, da $\det(A)=\det(\alpha)$ mit $\alpha\colon
\begin{cases}
\K^n\to\K^n \\
x\mapsto A\cdot x
\end{cases}
$.
\end{proof}

\subsubsection{Berechnungsverfahren}
Gaußalgorithmus führt 1) Zeilenvertauschungen und 2) Additionen von
Vielfachen einer Zeile zu einer anderen durch. Das Ergebnis ist eine obere
Dreiecksmatrix.
\begin{equation}
\label{eq:dreiecksmatrix}
B=
\begin{pmatrix}
b_{11} & \dots & \dots & b_{1n} \\
0 & b_{22} & \dots & b_{2n} \\
\vdots & & \ddots & \vdots \\
0 & \dots & \dots & b_{nn}
\end{pmatrix}
\end{equation}
Die Operationen von 2) ändern die Determinante gar nicht, die Operationen von 1) ändern das Vorzeichen.

\begin{satz}
Sei $A\in\K^{n\times n}$ und $B$ wie \ref{eq:dreiecksmatrix} das Resultat des
Gaußalgorithmus auf $A$ angewendet mit $k$ Zeilenvertauschungen. Dann gilt
\[
\det(A)=(-1)^kb_{11}\cdot\dots\cdot b_{nn}
\]
\end{satz}
\begin{proof}
Für Matrizen der Form \ref{eq:dreiecksmatrix} ist die Determinante das Produkt der Diagonalelemente.
Rest folgt aus der Definition des Gaußalgorithmus, sowie Satz \ref{theo:1.4.1}.
\end{proof}
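Zum Beispiel liefert eine Zeilenvertauschung
\[
\begin{vmatrix}
0 & 1 \\
2 & 3
\end{vmatrix}
= (-1)^1 \cdot
\begin{vmatrix}
2 & 3 \\
0 & 1
\end{vmatrix}
= -2\cdot1 = -2,
\]
in Übereinstimmung mit $0\cdot3 - 1\cdot2 = -2$.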

\subsubsection{Regel von Sarrus}
Sei $A=
\begin{pmatrix}
a_{11} & a_{12} & a_{13} \\
a_{21} & a_{22} & a_{23} \\
a_{31} & a_{32} & a_{33}
\end{pmatrix}
\in\K^{3\times3}$
\[
\begin{array}{ccccccccc}
a_{11} & & a_{12} & & a_{13} & & a_{11} & & a_{12} \\
& \color{ForestGreen}\diagdown & & \color{ForestGreen}\diagdown \color{red} \mathllap \diagup & & \color{ForestGreen}\diagdown \color{red} \mathllap \diagup & & \color{red}\diagup \\
a_{21} & & a_{22} & & a_{23} & & a_{21} & & a_{22} \\
& \color{red} \diagup & & \color{ForestGreen}\diagdown \color{red} \mathllap \diagup & & \color{ForestGreen}\diagdown \color{red} \mathllap \diagup & & \color{ForestGreen}\diagdown \\
a_{31} & & a_{32} & & a_{33} & & a_{31} & & a_{32}
\end{array}
\color{ForestGreen} + + + \color{red} - - -
\]
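Also
\[
\det(A) = a_{11}a_{22}a_{33} + a_{12}a_{23}a_{31} + a_{13}a_{21}a_{32}
- a_{13}a_{22}a_{31} - a_{11}a_{23}a_{32} - a_{12}a_{21}a_{33}
\]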

$A=
\begin{pmatrix}
a_{11} & a_{12} \\
a_{21} & a_{22}
\end{pmatrix}
\in\K^{2\times2} \implies \det(A)=a_{11}a_{22}-a_{12}a_{21}$\\
$n>3 \to $ Gaußalgorithmus
\begin{defin}
Sei $A\in\K^{n\times n}$ und $i, j\in[n]$. Sei $M_{ij}\in\K^{n\times n}$ die Matrix,
welche durch Ersetzung der j-ten Spalte durch den i-ten Einheitsvektor $e_i$ entsteht.\\
$A_{ij}:=\det(M_{ij})$ heißt \underline{Kofaktor} (zum Indexpaar $(i, j)$).
\begin{equation*}
\bordermatrix{
&&&&j \cr
&a_{11}&\dots &a_{1j-1}&0&a_{1j+1}&\dots&a_{1n} \cr
&\vdots&\ddots&\vdots&\vdots&\vdots&\ddots&\vdots \cr
i&a_{i1}&\dots&a_{ij-1}&1&a_{ij+1}&\dots&a_{in}\cr
&\vdots&\ddots&\vdots&\vdots&\vdots&\ddots&\vdots \cr
&a_{n1}&\dots &a_{nj-1}&0&a_{nj+1}&\dots&a_{nn}
}
\genfrac{}{}{0pt}{0}{= M_{ij}}{=(a_{\_1}, \dots, \underbrace{e_i}_{j}, \dots, a_{\_n})}
\end{equation*}
\end{defin}

\subsubsection{Bemerkung}
Es gilt
\begin{equation}
\label{eq:1.4.4.1}
A_{ij}=
\begin{vmatrix}
a_{11} & \dots & a_{1j-1} & 0 & a_{1j+1} & \dots & a_{1n} \\
\vdots & \ddots & \vdots & \vdots & \vdots & \ddots & \vdots \\
0 & \dots & 0 & 1 & 0 & \dots & 0 \\
\vdots & \ddots & \vdots & \vdots & \vdots & \ddots & \vdots \\
a_{n1} & \dots & a_{nj-1} & 0 & a_{nj+1} & \dots & a_{nn}
\end{vmatrix}
\end{equation}
da obige Matrix aus $M_{ij}$ durch Spaltenadditionen hervorgeht.
\begin{lemma}
Sei $\tilde{A_{ij}}\in\K^{(n-1)\times(n-1)}$ die Matrix, welche aus A durch
Streichung der i-ten Zeile und j-ten Spalte hervorgeht und
$D_{ij}:=\det(\tilde{A_{ij}})$. Dann gilt \[A_{ij}=(-1)^{i+j}D_{ij}\]
\end{lemma}
\begin{proof}
Transformiere durch ($i-1$) Zeilenvertauschungen und ($j-1$) Spaltenvertauschungen die Matrix
\ref{eq:1.4.4.1} auf
\[
B_{ij} =
\begin{pmatrix}
1 & 0 & \dots & 0 \\
0 & & & \\
\vdots & & \tilde{A_{ij}} & \\
0 & & &
\end{pmatrix}
\]
Es gilt $\abs{B_{ij}}=D_{ij}$ und $\abs{B_{ij}}=(-1)^{(i-1)+(j-1)}A_{ij}$,
woraus die Behauptung folgt.
\end{proof}
\begin{satz}[Entwicklungssatz von Laplace]
Sei $A\in\K^{n\times n}$ und $i, j\in[n]$. Dann gilt
\begin{enumerate}[label=\alph*)]
\item $\det(A) = \sum\limits_{l=1}^na_{il}A_{il} = \sum\limits_{l=1}^n(-1)^{l+i}a_{il}D_{il}$
\item $\det(A) = \sum\limits_{l=1}^na_{lj}A_{lj} = \sum\limits_{l=1}^n(-1)^{l+j}a_{lj}D_{lj}$
\end{enumerate}
\end{satz}
\begin{proof}
b) \[
\begin{aligned}
\det(A) & = \det(a_{\_1}, \dots, a_{\_n}) \\
& =\det(a_{\_1}, \dots, \underbrace{\sum_{l=1}^na_{lj}e_l}_{=a_{\_j}}, \dots, a_{\_n}) \\
& =\sum_{l=1}^n a_{lj}\det(a_{\_1}, \dots, \underbrace{e_l}_{j}, \dots, a_{\_n}) \\
& = \sum_{l=1}^n a_{lj}A_{lj}
\end{aligned}
\]
a) analog (angewendet auf $A^T$).
\end{proof}
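Zum Beispiel liefert die Entwicklung nach der ersten Zeile
\[
\begin{vmatrix}
1 & 2 & 0 \\
0 & 3 & 4 \\
5 & 0 & 6
\end{vmatrix}
= 1\cdot
\begin{vmatrix}
3 & 4 \\
0 & 6
\end{vmatrix}
- 2\cdot
\begin{vmatrix}
0 & 4 \\
5 & 6
\end{vmatrix}
+ 0
= 18 - 2\cdot(-20) = 58
\]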

\begin{satz}[Cramer'sche Regel]
\label{theo:1.4.7}
Sei $\adj(A)=(A_{ji})_{i, j\in[n]}$. Dann gilt
\[A\cdot \adj(A) = \det(A)\cdot I_n\]
\end{satz}
\begin{proof}
Sei $B=A\cdot\adj(A)\implies$
\[
\begin{aligned}
b_{ij} & = \sum_{k=1}^n a_{ik} A_{jk} \\
& = \sum_{k=1}^n a_{ik} \det(a_{\_1}, \dots, \underbrace{e_j}_{k}, \dots, a_{\_n}) \\
& = \sum_{k=1}^n a_{ik} \cdot
\det \left(
\bordermatrix{
& & & k & & \\
& a_{11} & \dots & a_{1k} & \dots & a_{1n} \\
& \vdots & \ddots & \vdots & \ddots & \vdots \\
j & 0 & \dots & 1 & \dots & 0 \\
& \vdots & \ddots & \vdots & \ddots & \vdots \\
& a_{n1} & \dots & a_{nk} & \dots & a_{nn} \\
} \right) \\
& = \det\left(\bordermatrix{ & \\
& a_{1\_} \\
& \vdots \\
j \to & a_{i\_} \\
& \vdots \\
& a_{n\_}}\right) \\
& =
\begin{cases}
0 & i\neq j \\
\det(A) & i=j
\end{cases}
\end{aligned}
\]
\end{proof}

\begin{folgerung}
Sei $A\in\K^{n\times n}$ invertierbar. Sei $x\in\K^n$ die eindeutige Lösung des linearen Gleichungssystems
$Ax=b$. Dann gilt
\[
x_i= \det(A)^{-1} \det(a_{\_1}, \dots, \underbrace{b}_{i}, \dots, a_{\_n})
\]
\end{folgerung}
\begin{proof}
\[
\begin{aligned}
A^{-1} & =\frac{1}{\det(A)}\adj(A) \implies \\
\det(A)x_i & =\sum_{j=1}^n A_{ji}b_j
= \sum_{j=1}^n b_j \det(a_{\_1}, \dots, \underbrace{e_j}_{i}, \dots, a_{\_n}) \\
& =\det(a_{\_1}, \dots, \underbrace{b}_{i}, \dots, a_{\_n})
\end{aligned}
\]
\end{proof}
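Zum Beispiel hat das System $\begin{pmatrix}2 & 1\\ 1 & 3\end{pmatrix}x = \begin{pmatrix}5\\ 10\end{pmatrix}$
wegen $\det(A) = 5$ die Lösung
\[
x_1 = \frac15\begin{vmatrix}5 & 1\\ 10 & 3\end{vmatrix} = 1, \qquad
x_2 = \frac15\begin{vmatrix}2 & 5\\ 1 & 10\end{vmatrix} = 3.
\]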

\subsubsection{Blockmatrizen}

\begin{defin}
$A\in\K^{n\times n}$ heißt \underline{obere Blockmatrix} wenn $\exists p\in \{1, \dots, n-1\}$ mit $a_{ij}=0$
für $p+1\le i\le n, 1\le j\le p$, d.h.
\begin{equation}
\label{blockmatrix}
A=\bordermatrix{
\ &\overbrace{}^{p} & \overbrace{}^{n-p} \cr
p\{\ & P & D \cr % } TODO geschwungene Klammern besser machen
n-p\{\ &0&Q} % }
\end{equation}
Analog sind \underline{untere Blockmatrizen} definiert.
\end{defin}

\begin{satz}
\label{theo:1.4.10}
Sei $A$ obere Blockmatrix wie in \ref{blockmatrix}. Dann gilt $\det(A)= \det(P) \det(Q)$
\end{satz}
\begin{proof}
Sei $A =
\begin{pmatrix}
P & D \\
0 & Q
\end{pmatrix}
$.\\
Wende elementare Zeilenumformungen der ersten $p$ Zeilen an, sodass $P$ obere Dreiecksform hat
(mit $s$ Zeilenvertauschungen) und elementare Zeilenumformungen der letzten $n-p$ Zeilen sodass
$Q$ obere Dreiecksform hat (mit $t$ Zeilenvertauschungen).
Bezeichne das Ergebnis mit $A'=
\begin{pmatrix}
P' & D' \\
0 & Q'
\end{pmatrix}
$, wobei $P', Q'$ obere
Dreiecksform haben.\\
Es folgt, dass $A', P', Q'$ obere Dreiecksform haben. Da die Determinante oberer Dreiecksmatrizen
das Produkt der Diagonalelemente ist, gilt $\det(A')=\det(P')\det(Q')$.\\
Weiters gilt $\det(A')=(-1)^{s+t} \det(A)$ (insgesamt $s+t$ Vertauschungen)
und $\det(P')= (-1)^s \det(P), \det(Q') = (-1)^t \det(Q)$. Daraus folgt die Behauptung.
\end{proof}
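Zum Beispiel gilt (mit $p=2$)
\[
\begin{vmatrix}
1 & 2 & 7 \\
3 & 4 & 8 \\
0 & 0 & 5
\end{vmatrix}
=
\begin{vmatrix}
1 & 2 \\
3 & 4
\end{vmatrix}
\cdot 5 = (-2)\cdot 5 = -10
\]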

\chapter{Eigenwerte und Eigenvektoren}

\section{Diagonalisierbarkeit}

\begin{defin}
$D\in \K^{n\times n}$ heißt \underline{Diagonalmatrix} wenn $\forall i\neq j\colon d_{ij}=0$.
Wir schreiben auch
\[
\diag(\lambda_1, \dots, \lambda_n):=
\begin{pmatrix}
\lambda_1 & 0 & \dots & 0 \\
0 & \lambda_2 & \dots & 0 \\
\vdots & \vdots & \ddots & \vdots \\
0 & 0 & \dots & \lambda_n
\end{pmatrix}
\]
\end{defin}

\subsubsection{Bemerkung}
\begin{itemize}
\item $A\in \K^{n\times m} \implies \diag(\lambda_1, \dots, \lambda_n)A =
\begin{pmatrix}
\lambda_1 a_{1\_} \\
\vdots \\
\lambda_n a_{n\_}
\end{pmatrix}
$
\item $\diag(\lambda_1, \dots, \lambda_n)^k = \diag(\lambda_1^k, \dots, \lambda_n^k)$
\end{itemize}
\begin{defin}
\begin{enumerate}[label=\alph*)]
\item $\alpha \in \homkv, \dim(V)<\infty$ heißt \underline{diagonalisierbar} (bzgl. $B$)
wenn eine geordnete Basis $B$ existiert mit ${}_B M(\alpha)_B$ Diagonalmatrix
\item $A\in\K^{n\times n}$ heißt diagonalisierbar wenn eine invertierbare Matrix $P\in\K^{n\times n}$
existiert mit $P^{-1}AP$ Diagonalmatrix.
\end{enumerate}
\end{defin}

\begin{lemma}
Sei $V$ ein $\K$-Vektorraum mit $\dim(V)=n<\infty$. \\ Dann gilt für
$\alpha\in\homkv$ und $C$ Basis:
\[\alpha \text{ diagonalisierbar} \iff {}_C M(\alpha)_C \text{ diagonalisierbar}\]
\end{lemma}
\begin{proof}
\begin{itemize}
\item[$\implies$:] Sei $\alpha$ diagonalisierbar und $B$ eine Basis mit $_B M(\alpha)_B$ Diagonalmatrix.
Dann gilt
\begin{align*}{}_B M(\alpha)_B & = {}_B M(\id)_C \cdot {}_C M(\alpha)_C \cdot {}_C M(\id)_B \\
& = {{}_C M(\id)_{B}}^{-1} \cdot {}_C M(\alpha)_C \cdot {}_C M(\id)_B
\end{align*}
Also ist ${}_C M(\alpha)_C$ diagonalisierbar.
\item[$\impliedby$:] Sei ${}_C M(\alpha)_C$ diagonalisierbar und $P$ invertierbar mit
$P^{-1}\cdot {}_C M(\alpha)_C \cdot P$ Diagonalmatrix.
Sei $B$ Basis mit $P={}_C M(\id)_B$.
Dann ist ${}_B M(\alpha)_B$ Diagonalmatrix.
\end{itemize}
\end{proof}
\begin{lemma}
\label{theo:2.1.4}
\begin{enumerate}[label=\alph*)]
\item $\alpha \in \homkv$ ist diagonalisierbar genau wenn es eine Basis
$B=(b_1, \dots, b_n)$ und $\lambda_1, \dots, \lambda_n\in\K$ gibt mit
$\forall i=1, \dots, n\colon\alpha(b_i)=\lambda_i b_i$.
\item $A\in\K^{n\times n}$ ist diagonalisierbar genau wenn es eine geordnete Basis $B= (b_1, \dots, b_n)$
von $\K^n$ und $\lambda_1, \dots, \lambda_n\in\K$ gibt mit $\forall i=1, \dots, n\colon A b_i = \lambda_i b_i$.
\end{enumerate}
\end{lemma}
\begin{proof}
\begin{enumerate}[label=\alph*)]
\item die Bedingung ist äquivalent dazu, dass ${}_B M(\alpha)_B = \diag(\lambda_1, \dots, \lambda_n)$
Diagonalmatrix ist.
\item Spezialfall von a).
\end{enumerate}
\end{proof}

\section{Eigenwerte und Eigenvektoren}

\begin{defin}
\label{theo:2.2.1}
\begin{enumerate}[label=\alph*)]
\item Sei $\alpha \in \homkv$. $\lambda\in\K$ heißt \underline{Eigenwert} von
$\alpha$ wenn es einen Vektor $v\in V\setminus\{0\}$ gibt mit
$\alpha(v)=\lambda v$. $v$ heißt \underline{Eigenvektor} zu $\lambda$.\\ Die
Menge aller Eigenwerte von $\alpha$ heißt \underline{Spektrum} von $\alpha;
\spec(\alpha)$
\item Sei $A \in \K^{n\times n}$. $\lambda\in\K$ heißt \underline{Eigenwert} von $A$
wenn es $v\in \K^n\setminus\{0\}$ gibt mit $A v = \lambda v$. $v$ heißt
\underline{Eigenvektor} zu $\lambda$.\\ Die Menge aller Eigenwerte von $A$
heißt \underline{Spektrum} von $A; \spec(A)$
\end{enumerate}
\end{defin}

\begin{lemma}
\label{theo:2.2.2}
\begin{enumerate}[label=\alph*)]
\item $\alpha \in \homkv$ diagonalisierbar $\iff \exists$ Basis aus Eigenvektoren.
\item $A \in \K^{n\times n}$ diagonalisierbar $\iff \exists$ Basis aus Eigenvektoren.
\end{enumerate}
\end{lemma}
\begin{proof}
Folgt direkt aus Lemma \ref{theo:2.1.4} und Definition \ref{theo:2.2.1}
\end{proof}
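Zum Beispiel hat $A = \begin{pmatrix}2 & 1\\ 0 & 3\end{pmatrix}\in\R^{2\times2}$ die Eigenwerte
$2$ und $3$ mit Eigenvektoren $e_1$ und $(1, 1)^T$; mit
$P = \begin{pmatrix}1 & 1\\ 0 & 1\end{pmatrix}$ gilt $P^{-1}AP = \diag(2, 3)$,
$A$ ist also diagonalisierbar.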
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $\alpha \in \homkv$ und $\lambda \in \spec(\alpha)$. Dann heißt
|
2023-03-28 11:46:57 +02:00
|
|
|
$\eig_\alpha(\lambda):=\{v\in V\colon \alpha(v) = \lambda v \}$ der zugehörige
|
2023-01-31 13:30:38 +01:00
|
|
|
\underline{Eigenraum}.
|
|
|
|
\item Sei $A \in \K^{n\times n}$ und $\lambda \in \spec(A)$. Dann heißt
|
2023-03-28 11:46:57 +02:00
|
|
|
$\eig_A(\lambda):=\{v\in \K^n\colon A v = \lambda v \}$ der zugehörige
|
2023-01-31 13:30:38 +01:00
|
|
|
\underline{Eigenraum}.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $\alpha \in \homkv / A\in\K^{n\times n}$ und $\lambda \in
|
|
|
|
\spec(\alpha)/\lambda\in\spec(A)$.\\ Dann ist
|
|
|
|
$\eig_\alpha(\lambda)/\eig_A(\lambda)$ ein Unterraum von $V/\K$.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Nur für $\alpha\in\homkv$
|
|
|
|
\begin{itemize}
|
|
|
|
\item $ 0 = \alpha(0) = \lambda \cdot 0 \implies 0 \in \eig_\alpha(\lambda) $
|
|
|
|
\item $v, w\in \eig_\alpha(\lambda) \implies \alpha(v+w)
|
|
|
|
= \alpha(v) + \alpha(w) = \lambda v + \lambda w
|
|
|
|
= \lambda(v + w) \implies v + w \in \eig_\alpha(\lambda)$
|
|
|
|
\item $\mu \in \K, v \in \eig_\alpha(\lambda) \implies \alpha(\mu v) =
|
|
|
|
\mu \cdot \alpha(v) = \mu \cdot \lambda \cdot v =
|
|
|
|
\lambda \cdot (\mu \cdot v) \implies \mu \cdot v \in \eig_\alpha(\lambda)$
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-28 10:33:22 +02:00
|
|
|
Sei $\alpha \in \homkv$ und $B$ Basis. Dann gilt
|
2022-05-07 23:36:49 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \spec(\alpha) = \spec({}_B M(\alpha)_B) \\
|
|
|
|
& {}_B\Phi(\eig_\alpha(\lambda)) = \eig_{{}_B M(\alpha)_B}(\lambda)
|
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $\lambda \in \spec(\alpha)$ und $v\in\eig_\alpha(\lambda)$. Dann gilt
|
|
|
|
\[
|
|
|
|
\alpha(v) = \lambda v \iff {}_B M(\alpha)_B \cdot {}_B v = \lambda \cdot {}_B v
|
|
|
|
\]
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-05-07 23:36:49 +02:00
|
|
|
\item Sei $\alpha \in \homkv, \dim(V)<\infty$ und $B$ Basis. Dann heißt die Funktion
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\chi_\alpha\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
\K \to \K \\
|
|
|
|
\lambda \mapsto \det({}_B M(\alpha)_B - \lambda \cdot I_n)
|
|
|
|
\end{cases}
|
2022-05-07 23:36:49 +02:00
|
|
|
\]
|
|
|
|
\underline{charakteristisches Polynom} von $\alpha$.
|
|
|
|
\item Sei $A \in \K^{n\times n}$. Dann heißt die Funktion
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\chi_A\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
\K \to \K \\
|
|
|
|
\lambda \mapsto \det(A - \lambda \cdot I_n)
|
|
|
|
\end{cases}
|
2022-05-07 23:36:49 +02:00
|
|
|
\]
|
|
|
|
\underline{charakteristisches Polynom} von $A$.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
$\genfrac{}{}{0pt}{0}{\chi_\alpha}{\chi_A}$ ist Polynom vom Grad
|
|
|
|
$\le\genfrac{}{}{0pt}{0}{\dim(V)}{n}$, da
|
2022-05-07 23:36:49 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \chi_A(\lambda)=
|
|
|
|
\sum_{\pi \in S_n} \sgn(\pi)\, \tilde{a}_{1\pi(1)}^{(\lambda)} \cdots \tilde{a}_{n\pi(n)}^{(\lambda)} \text{ mit} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
& \tilde{a}_{ij}^{(\lambda)} =
|
|
|
|
\begin{cases}
|
|
|
|
a_{ij} & i\neq j \\
|
|
|
|
a_{ij}-\lambda & i=j
|
|
|
|
\end{cases}
|
|
|
|
\dots \text{ Polynom von Grad $0$ oder $1$}
|
2022-05-07 23:36:49 +02:00
|
|
|
\end{align*}
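
Für $n=2$ sieht man das direkt an einer kleinen, frei gewählten Proberechnung:
% frei gewähltes Zahlenbeispiel zur Illustration
\[
A = \begin{pmatrix} 2 & 1 \\ 0 & 3 \end{pmatrix}, \quad
\chi_A(\lambda) = \begin{vmatrix} 2-\lambda & 1 \\ 0 & 3-\lambda \end{vmatrix}
= (2-\lambda)(3-\lambda) = \lambda^2 - 5\lambda + 6,
\]
ein Polynom vom Grad $2 = n$.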
|
2022-04-12 12:48:05 +02:00
|
|
|
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{lemma}
|
|
|
|
\label{theo:2.2.7}
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\chi_\alpha$ ist unabhängig von der Wahl der Basis.
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\chi_A = \chi_B$ wenn $A, B$ ähnlich sind (das heißt es existiert ein reguläres $P \in \K^{n \times n}$ mit $B = P^{-1}AP$)
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $C$ eine weitere Basis.\\ Dann gilt $\underbrace{{}_C M(\alpha)_C}_{B} =
|
|
|
|
\underbrace{{}_C M(\id)_B}_{P^{-1}} \underbrace{{}_B M(\alpha)_B}_{A}
|
|
|
|
\underbrace{{}_B M(\id)_C}_{P}$. \\ Man kann also alles auf b) zurückführen.
|
|
|
|
\item \[
|
|
|
|
\begin{aligned}
|
2022-06-08 23:25:28 +02:00
|
|
|
\chi_A(\lambda) & = \det(A-\lambda I) \\
|
|
|
|
& = \det(P)^{-1} \det(A - \lambda I) \det(P) \\
|
|
|
|
& = \det(P^{-1}) \det(A - \lambda I) \det(P) \\
|
|
|
|
& = \det(P^{-1}(A - \lambda I)P) \\
|
|
|
|
& = \det(P^{-1}AP-\lambda I) \\
|
|
|
|
& = \det(B - \lambda I) \\
|
|
|
|
& = \chi_B(\lambda)
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{aligned}
|
|
|
|
\]
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2023-03-28 11:46:57 +02:00
|
|
|
\item Sei $\alpha\in\homkv$. Dann gilt \[\spec(\alpha) = \{\lambda \in \K\colon \chi_\alpha(\lambda)=0\}\]
|
|
|
|
\item Sei $A\in \K^{\nxn}$. Dann gilt \[\spec(A) = \{\lambda \in \K\colon \chi_A(\lambda)=0\}\]
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Nur b)
|
2023-01-31 13:30:38 +01:00
|
|
|
\[
|
|
|
|
\begin{aligned}
|
2023-03-28 11:46:57 +02:00
|
|
|
\lambda \in \spec(A) & \iff \exists v\in \K^n \setminus \{0\}\colon A v = \lambda v \\
|
|
|
|
& \iff \exists v \in \K^n \setminus \{0\}\colon (A - \lambda I) v = 0 \\
|
|
|
|
& \iff \ker(A - \lambda I) \neq \{0\} \\
|
|
|
|
& \iff A - \lambda I \text{ nicht injektiv} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \iff \det(A - \lambda I) = 0
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{aligned}
|
|
|
|
\]
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\subsubsection{Beispiele}
|
|
|
|
\begin{alignat*}{3}
|
2023-01-31 13:30:38 +01:00
|
|
|
& A =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\bar3 & \bar4 \\
|
|
|
|
\bar1 & \bar1
|
|
|
|
\end{pmatrix}
|
|
|
|
\in \mathbb{Z}_5^{2\times2} & & \\
|
|
|
|
& \chi_A(\lambda) =
|
|
|
|
\begin{vmatrix}
|
|
|
|
\bar3 - \lambda & \bar4 \\
|
|
|
|
\bar1 & \bar1 - \lambda
|
|
|
|
\end{vmatrix}
|
|
|
|
& & = (\bar3 - \lambda)(\bar1 - \lambda) - \bar4 \\
|
|
|
|
& & & = \bar3 - \bar4 \lambda + \lambda^2 - \bar4 \\
|
|
|
|
& & & = \bar4 - \bar4 \lambda + \lambda^2 = (\bar2 - \lambda)^2 \\
|
|
|
|
& \implies \spec(A) = \{\bar2\} \\
|
|
|
|
& \eig_{\bar2}(A) = ? \\
|
|
|
|
& v \in \eig_{\bar2}(A) \iff \mathrlap{(A - \bar2 I)v = 0} \\
|
|
|
|
& \iff \mathrlap{\left(
|
|
|
|
\begin{array}{c c | c}
|
|
|
|
\bar3 - \bar2 & \bar4 & \bar0 \\
|
|
|
|
\bar1 & \bar1 - \bar2 & \bar0
|
|
|
|
\end{array}
|
|
|
|
\right)} \\
|
|
|
|
& = \left(
|
|
|
|
\begin{array}{c c | c}
|
|
|
|
\bar1 & \bar4 & \bar0 \\
|
|
|
|
\bar1 & \bar4 & \bar0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \sim \left(
|
|
|
|
\begin{array}{c c | c}
|
|
|
|
\bar1 & \bar4 & \bar0 \\
|
|
|
|
\bar0 & \bar0 & \bar0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \implies \eig_{\bar2}(A) = \linspan{
|
|
|
|
\begin{pmatrix}
|
|
|
|
\bar1 \\
|
|
|
|
\bar1
|
|
|
|
\end{pmatrix}
|
|
|
|
} \\
|
|
|
|
& \implies A \mathrlap{\text{ nicht diagonalisierbar [Lemma \ref{theo:2.1.4} (b)]}}
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{alignat*}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-28 10:33:22 +02:00
|
|
|
Sei $A \in \C^{n\times n}$ mit reellen Einträgen. Dann gilt:
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\lambda \in \spec(A) \implies \overline{\lambda} \in \spec(A)$
|
|
|
|
\item $v \in \eig_\lambda(A) \implies \overline{v} \in \eig_{\overline{\lambda}}(A)$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Klarerweise ist $\chi_A(\lambda)$ ein Polynom mit reellen Koeffizienten, also
|
2023-01-31 13:30:38 +01:00
|
|
|
$\chi_A(\lambda)=a_0+a_1 \lambda + \cdots + a_n \lambda^n, a_0, \dots,
|
|
|
|
a_n\in\R$\\ Sei $\chi_A(\lambda)=0 \implies 0 = \overline0 = a_0 + a_1
|
|
|
|
\overline\lambda + \cdots + a_n \overline{\lambda} ^ n =
|
|
|
|
\chi_A(\overline\lambda)$
|
2022-06-28 14:39:16 +02:00
|
|
|
\item $v\in\eig_\lambda(A) \implies A v = \lambda v \implies \overline{A v}
|
2022-06-08 23:25:28 +02:00
|
|
|
= \overline{\lambda v} \implies A \overline{v} = \overline\lambda \overline{v}$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
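
Ein frei gewähltes Beispiel dazu ist die Drehmatrix um $90^\circ$:
% frei gewähltes Zahlenbeispiel zur Illustration
\[
A = \begin{pmatrix} 0 & -1 \\ 1 & 0 \end{pmatrix} \in \C^{2\times2}, \quad
\chi_A(\lambda) = \lambda^2 + 1, \quad \spec(A) = \{i, -i\}, \quad
A \begin{pmatrix} 1 \\ -i \end{pmatrix} = i \begin{pmatrix} 1 \\ -i \end{pmatrix}, \quad
A \begin{pmatrix} 1 \\ i \end{pmatrix} = -i \begin{pmatrix} 1 \\ i \end{pmatrix},
\]
Eigenwerte und Eigenvektoren treten also wie im Lemma in konjugierten Paaren auf.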
|
2022-04-12 12:48:05 +02:00
|
|
|
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{lemma}
|
|
|
|
\label{theo:2.2.10}
|
2022-04-12 12:48:05 +02:00
|
|
|
Eigenvektoren zu unterschiedlichen Eigenwerten sind linear unabhängig.
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Seien $v_i \in \eig_{\lambda_i}(A), i=1, \dots, r, \lambda_i \neq \lambda_j \text{ für } i\neq j$.
|
|
|
|
Induktion nach $r$
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$r=1$:] $v_1$ ist linear unabhängig.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item[$r-1\mapsto r$:]
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:2.2.10.1}
|
|
|
|
\mu_1 v_1 + \cdots + \mu_r v_r = 0
|
|
|
|
\end{equation}
|
2022-06-08 23:25:28 +02:00
|
|
|
\[ \implies A(\mu_1 v_1 + \cdots + \mu_r v_r) = 0 \]
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{equation}
|
|
|
|
\label{eq:2.2.10.2}
|
2022-06-08 23:25:28 +02:00
|
|
|
\implies \lambda_1\mu_1 v_1 + \cdots + \lambda_r \mu_r v_r = 0
|
|
|
|
\end{equation}
|
|
|
|
Weiters folgt durch Multiplikation von \ref{eq:2.2.10.1} mit $\lambda_r$,
|
2023-01-31 13:30:38 +01:00
|
|
|
dass
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:2.2.10.3}
|
|
|
|
\lambda_r \mu_1 v_1 + \cdots + \lambda_r \mu_r v_r = 0
|
|
|
|
\end{equation}
|
|
|
|
\[
|
|
|
|
\begin{aligned}
|
2022-06-08 23:25:28 +02:00
|
|
|
\text{\ref{eq:2.2.10.3}} - \text{\ref{eq:2.2.10.2}}
|
|
|
|
& \implies \underbrace{(\lambda_r - \lambda_1)}_{\neq0} \mu_1 v_1 + \cdots +
|
|
|
|
\underbrace{(\lambda_r - \lambda_{r-1})}_{\neq0} \mu_{r-1} v_{r-1} = 0 \\
|
2023-01-04 23:18:29 +01:00
|
|
|
& \underbrace{\implies}_{\mathclap{\substack{v_1, \dots, v_{r-1} \text{ nach Induktions-} \\ \text{voraussetzung linear unabhängig}}}} \mu_1 = \cdots = \mu_{r-1} = 0
\overset{\text{\ref{eq:2.2.10.1}}}{\implies} \mu_r v_r = 0 \implies \mu_r = 0
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{aligned}
|
|
|
|
\]
Also sind $v_1, \dots, v_r$ linear unabhängig.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-28 20:39:01 +02:00
|
|
|
Sei $\alpha \in \homkv, \dim(V)=n \text{ oder } A \in \K^{\nxn}$ mit $n$ Eigenvektoren zu paarweise verschiedenen
|
|
|
|
Eigenwerten. Dann ist $\alpha/A$ diagonalisierbar.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Wegen Lemma \ref{theo:2.2.10} gibt es Basis von Eigenvektoren.
|
|
|
|
Daher ist $\alpha/A$ diagonalisierbar wegen Lemma \ref{theo:2.2.2}.
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
2022-05-07 23:36:49 +02:00
|
|
|
Sei $\spec(A) = \{\lambda_1, \dots, \lambda_r \}$ und
|
2022-06-28 20:39:01 +02:00
|
|
|
$\chi_A(\lambda)=(\lambda_1 - \lambda)^{k_1} \cdots (\lambda_r - \lambda)^{k_r} \cdot p \in\K[X]$ mit $p$
|
2022-05-07 23:36:49 +02:00
|
|
|
nicht durch Linearfaktoren teilbar (also keine Nullstellen in $\K$).\\
|
|
|
|
$k_i$ heißt \underline{algebraische Vielfachheit} des Eigenwerts $\lambda_i$.
|
|
|
|
Wir schreiben $k_i = m_a(\lambda_i)$.\\
|
|
|
|
$\dim(\eig_A(\lambda_i))$ heißt \underline{geometrische Vielfachheit} des Eigenwerts $\lambda_i$.
|
|
|
|
Wir schreiben $\dim(\eig_A(\lambda_i)) = m_g(\lambda_i)$
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
|
|
|
\begin{itemize}
|
2022-06-28 20:39:01 +02:00
|
|
|
\item $\chi_A(\lambda) = \lambda^4 - 2 \lambda^3 + 2 \lambda^2 - 2\lambda + 1 \in \R[X]$\\
|
2022-06-28 14:39:16 +02:00
|
|
|
$\implies \chi_A(\lambda) = (1 - \lambda)^2 \underbrace{(1 + \lambda^2)}_{p(\lambda)}$ \\
|
2022-04-12 12:48:05 +02:00
|
|
|
$\implies m_a(1) = 2$
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Für $\K=\C$ zerfällt jedes Polynom in Linearfaktoren, also ist $p$ immer
|
|
|
|
konstant.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{itemize}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
Sei $\mu\in\spec(A)/\spec(\alpha)$. Dann gilt \[ 1\le m_g(\mu) \le m_a(\mu) \]
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Klarerweise gilt $1\le m_g(\mu)$ da $\mu$ Eigenwert ist.
|
|
|
|
Sei $r:= m_g(\mu)$ und $b_1, \dots, b_r$ Basis von $\eig_\alpha(\mu)$. Sei $B=(b_1, \dots, b_n)$ Basis.
|
|
|
|
Dann ist
|
|
|
|
\[ {}_B M(\alpha)_B =
|
|
|
|
\bordermatrix{
|
|
|
|
& & & & r & & \cr
|
|
|
|
& \mu & 0 & 0 & 0 & * & \dots & * \cr
|
|
|
|
& 0 & \mu & 0 & 0 & * & \dots & * \cr
|
|
|
|
& \vdots & & \ddots & \vdots & \vdots & & \vdots \cr
|
|
|
|
r & 0 & 0 & 0 & \mu & * & \dots & * \cr
|
|
|
|
& \vdots & \vdots & \vdots & \vdots & \vdots & & \vdots \cr
|
|
|
|
& 0 & 0 & 0 & 0 & * & \dots & *
|
|
|
|
}
|
|
|
|
\text{, also}
|
|
|
|
\]
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
\chi_\alpha(\lambda) & = \abs {
|
|
|
|
\begin{array}{c | c}
|
|
|
|
\begin{smallmatrix}
|
2022-06-13 10:53:40 +02:00
|
|
|
\mu - \lambda & & \\
|
|
|
|
& \ddots & \\
|
|
|
|
& & \mu - \lambda
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{smallmatrix}
|
|
|
|
& A \\
|
|
|
|
\hline \\
|
|
|
|
0 & C
|
|
|
|
\end{array}
|
|
|
|
} & & \underbrace{=}_
|
2022-06-08 23:25:28 +02:00
|
|
|
{\text{Satz \ref{theo:1.4.10}}} \det
|
|
|
|
\begin{pmatrix}
|
|
|
|
\mu - \lambda & & 0 \\
|
|
|
|
& \ddots & \\
|
|
|
|
0 & & \mu - \lambda
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
\cdot \det(C) \\
|
|
|
|
& & & = (\mu - \lambda)^r \det(C) \\
|
|
|
|
& & & \implies r \le m_a(\mu)
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{proof}
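
Dass $m_g(\mu) < m_a(\mu)$ vorkommen kann, zeigt schon ein frei gewähltes $2\times2$-Beispiel:
% frei gewähltes Zahlenbeispiel zur Illustration
\[
A = \begin{pmatrix} 1 & 1 \\ 0 & 1 \end{pmatrix}, \quad
\chi_A(\lambda) = (1-\lambda)^2 \implies m_a(1) = 2, \qquad
\ker(A - I) = \linspan{\begin{pmatrix} 1 \\ 0 \end{pmatrix}} \implies m_g(1) = 1.
\]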
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Seien $A, B$ ähnlich und $\mu \in \spec(A) (=\spec(B) \text{ nach Lemma
|
|
|
|
\ref{theo:2.2.7}})$. Dann stimmen die geometrischen Vielfachheiten überein, das
|
|
|
|
heißt $\dim(\eig_\mu(A)) = \dim(\eig_\mu(B))$.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $B = P^{-1} A P$. Dann gilt
|
|
|
|
\begin{align*}
|
|
|
|
\eig_{\mu}(B) & = \ker(B - \mu I) = \ker(B - \mu P^{-1} P) \\
|
|
|
|
& = \ker(P^{-1} (A - \mu I) P) \\
|
|
|
|
& \underbrace{\implies}_{\mathclap{\text{
|
|
|
|
Für ähnliche Matrizen stimmen die Dimensionen der Kerne überein
|
|
|
|
}}} \dim(\eig_\mu(B)) = \dim\eig_\mu(A)
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
$A/\alpha$ diagonalisierbar $\iff$
|
|
|
|
\begin{enumerate}[label=\roman*)]
|
|
|
|
\item $\chi_{A/\alpha}$ zerfällt in Linearfaktoren, das heißt
|
2022-05-07 23:36:49 +02:00
|
|
|
\[
|
|
|
|
\chi_{A/\alpha}(\lambda)= (\lambda_1 - \lambda)^{k_1} \cdots (\lambda_r - \lambda)^{k_r},
|
2022-06-30 13:04:01 +02:00
|
|
|
\sum_{i=1}^r k_i = n
|
2022-05-07 23:36:49 +02:00
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item algebraische und geometrische Vielfachheiten stimmen überein, das \\ heißt
|
|
|
|
$m_a(\lambda_i) = m_g(\lambda_i), i=1, \dots, r$
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{itemize}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item[$\impliedby$:] Aus i), ii) folgt, dass
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:2.2.15.1}
|
|
|
|
\sum_{i=1}^r \underbrace{\dim(\eig_\alpha(\lambda_i))}_{=m_g(\lambda_i)=:d_i} = n
|
|
|
|
\end{equation}
|
2022-06-28 20:39:01 +02:00
|
|
|
Sei $(b_i^1, \dots, b_i^{d_i})$ Basis von $\eig_\alpha(\lambda_i)$.
|
2023-03-28 11:46:57 +02:00
|
|
|
Wir zeigen, dass $B=\{b_i^1, \dots, b_i^{d_i}\colon i=1, \dots, r\}$ Basis ist.
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{enumerate}[label=\arabic*)]
|
2022-06-13 10:53:40 +02:00
|
|
|
\item $\abs{ B } = n$ folgt aus \ref{eq:2.2.15.1}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Ang. $\sum\limits_{i=1}^r (\underbrace{\mu_i^1 b_i^1 + \cdots + \mu_i^{d_i}
|
|
|
|
b_i^{d_i}}_{v_i}) = 0$ \\ $\underbrace{\implies}_ {\mathclap{\substack{v_i
|
|
|
|
\text{Eigenvektoren zu} \\ \text{verschiedenen Eigenwerten} \\ + \text{Lemma
|
|
|
|
\ref{theo:2.2.10}}}}} v_i = 0 \forall i=1, \dots, r \underbrace{\implies}_
|
2022-06-08 23:25:28 +02:00
|
|
|
{\mathclap{\substack{b_i^1, \dots, b_i^{d_i} \\ \text{Basis von }
|
2023-01-31 13:30:38 +01:00
|
|
|
\eig_\alpha(\lambda_i)}}} \mu_i^1, \dots, \mu_i^{d_i} = 0 \forall i=1, \dots,
|
|
|
|
r$ \\ $ \implies B $ ist Basis aus Eigenvektoren
|
|
|
|
$\underbrace{\implies}_{\mathclap{\text{Lemma \ref{theo:2.2.2}}}} \alpha $
|
|
|
|
diagonalisierbar.
|
2022-06-08 23:25:28 +02:00
|
|
|
|
|
|
|
\end{enumerate}
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\item[$\implies$:] Sei $\alpha$ diagonalisierbar. \[
|
|
|
|
\begin{aligned}
|
2022-06-28 20:39:01 +02:00
|
|
|
& \implies \exists \text{ Basis } B=(b_1, \dots, b_n) \text{ aus Eigenvektoren} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \implies {}_B M(\alpha)_B = \diag(\lambda_1, \dots, \lambda_n) \\
|
|
|
|
& \implies \chi_\alpha(\lambda) = (\lambda_1 - \lambda) \cdots (\lambda_n - \lambda)
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{aligned}
|
|
|
|
\]
Also gilt i). Für ii): Je $m_a(\lambda_i)$ der Basisvektoren aus $B$ sind Eigenvektoren zu $\lambda_i$, also
$m_g(\lambda_i) \ge m_a(\lambda_i)$; zusammen mit $m_g(\lambda_i) \le m_a(\lambda_i)$ folgt ii).
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\subsubsection{Diagonalisieren}
|
|
|
|
\begin{enumerate}[label=\arabic*)]
|
|
|
|
\item Zerlegung in Linearfaktoren
|
2022-05-07 23:36:49 +02:00
|
|
|
\[
|
|
|
|
\chi_A(\lambda) = (\lambda_1 - \lambda)^{m_a(\lambda_1)} \cdots (\lambda_r - \lambda)^{m_a(\lambda_r)}
|
|
|
|
\]
|
2022-04-12 12:48:05 +02:00
|
|
|
\item Bestimme Basis $B_i$ der Eigenräume
|
|
|
|
\[ \eig_A(\lambda_i) = \ker(A - \lambda_i I) \]
|
|
|
|
\item Ordne Basis $B= \bigcup\limits_{i=1}^r B_i$ zu $B= (b_1, \dots, b_n)$
|
2022-05-07 23:36:49 +02:00
|
|
|
\item Mit $S = (b_1, \dots, b_n)$ gilt dann
|
|
|
|
\[
|
2022-04-27 12:36:04 +02:00
|
|
|
\diag(\underbrace{\lambda_1, \dots, \lambda_n}_{
|
2022-05-07 23:36:49 +02:00
|
|
|
\mathclap{\substack{\text{Eigenwerte werden nach} \\
|
|
|
|
\text{Vielfachheit gezählt!} \\
|
|
|
|
b_i \text{ ist Eigenvektor zu } \lambda_i \text{!}}}
|
2022-04-27 16:18:26 +02:00
|
|
|
}) = S^{-1} A S
|
2022-04-12 12:48:05 +02:00
|
|
|
\]
|
|
|
|
\end{enumerate}
|
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
2023-01-31 13:30:38 +01:00
|
|
|
$A =
|
|
|
|
\begin{pmatrix}
|
2022-04-12 12:48:05 +02:00
|
|
|
1 & 2 & 2 \\
|
|
|
|
2 & -2 & 1 \\
|
|
|
|
2 & 1 & -2
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
$
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{enumerate}[label=\arabic*)]
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
|
|
|
\chi_A(\lambda) = &
|
|
|
|
\begin{vmatrix}
|
|
|
|
1 -\lambda & 2 & 2 \\
|
|
|
|
2 & -2 -\lambda & 1 \\
|
|
|
|
2 & 1 & -2 -\lambda
|
|
|
|
\end{vmatrix}
|
|
|
|
\\
|
|
|
|
\underbrace{=}_{\mathclap{\substack{\text{Entwicklung} \\
|
|
|
|
\text{nach 1. Zeile}}}}
|
|
|
|
& (1-\lambda)
|
|
|
|
\begin{vmatrix}
|
|
|
|
-2 -\lambda & 1 \\
|
|
|
|
1 & -2 -\lambda
|
|
|
|
\end{vmatrix}
|
|
|
|
+ (-2)
|
|
|
|
\begin{vmatrix}
|
|
|
|
2 & 1 \\
|
|
|
|
2 & -2-\lambda
|
|
|
|
\end{vmatrix}
|
|
|
|
\\
|
|
|
|
& + 2
|
|
|
|
\begin{vmatrix}
|
|
|
|
2 & -2 - \lambda \\
|
|
|
|
2 & 1
|
|
|
|
\end{vmatrix}
|
|
|
|
\\
|
2022-05-07 23:36:49 +02:00
|
|
|
= & \dots= -\lambda^3 - 3 \lambda^2 + 9\lambda + 27 = (3-\lambda)(-3-\lambda)^2
|
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\item $\lambda = 3$
|
2022-05-07 23:36:49 +02:00
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& \left(
|
|
|
|
\begin{array}{c c c | c} 1-3 & 2 & 2 & 0 \\
|
|
|
|
2 & -2-3 & 1 & 0 \\
|
|
|
|
2 & 1 & -2-3 & 0
|
|
|
|
\end{array}
|
|
|
|
\right)
|
|
|
|
= \left(
|
|
|
|
\begin{array}{c c c | c} -2 & 2 & 2 & 0 \\
|
|
|
|
2 & -5 & 1 & 0 \\
|
|
|
|
2 & 1 & -5 & 0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \sim \left(
|
|
|
|
\begin{array}{c c c | c} -1 & 1 & 1 & 0 \\
|
|
|
|
0 & -3 & 3 & 0 \\
|
|
|
|
0 & 3 & -3 & 0
|
|
|
|
\end{array}
|
|
|
|
\right)
|
|
|
|
\sim \left(
|
|
|
|
\begin{array}{c c c | c} 1 & -1 & -1 & 0 \\
|
|
|
|
0 & 1 & -1 & 0 \\
|
|
|
|
0 & 0 & 0 & 0
|
|
|
|
\end{array}
|
|
|
|
\right)
|
|
|
|
\sim \left(
|
|
|
|
\begin{array}{c c c | c} 1 & 0 & -2 & 0 \\
|
|
|
|
0 & 1 & -1 & 0 \\
|
|
|
|
0 & 0 & 0 & 0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \implies \eig_A(3) = \linspan{
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 \\
|
|
|
|
1 \\
|
|
|
|
1
|
|
|
|
\end{pmatrix}
|
|
|
|
}
|
2022-05-07 23:36:49 +02:00
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
$\lambda = -3$
|
2022-05-07 23:36:49 +02:00
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& \left(
|
|
|
|
\begin{array}{c c c | c} 1+3 & 2 & 2 & 0 \\
|
|
|
|
2 & -2+3 & 1 & 0 \\
|
|
|
|
2 & 1 & -2+3 & 0
|
|
|
|
\end{array}
|
|
|
|
\right)
|
|
|
|
= \left(
|
|
|
|
\begin{array}{c c c | c} 4 & 2 & 2 & 0 \\
|
|
|
|
2 & 1 & 1 & 0 \\
|
|
|
|
2 & 1 & 1 & 0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \sim \left(
|
|
|
|
\begin{array}{c c c | c} 2 & 1 & 1 & 0 \\
|
|
|
|
0 & 0 & 0 & 0 \\
|
|
|
|
0 & 0 & 0 & 0
|
|
|
|
\end{array}
|
|
|
|
\right) \\
|
|
|
|
& \implies \eig_A(-3) = \linspan {
|
|
|
|
\begin{pmatrix}
|
|
|
|
-1 \\
|
|
|
|
0 \\
|
|
|
|
2
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\begin{pmatrix}
|
|
|
|
-1 \\
|
|
|
|
2 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
|
|
|
}
|
2022-05-07 23:36:49 +02:00
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
|
|
|
& S =
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 & -1 & -1 \\
|
|
|
|
1 & 0 & 2 \\
|
|
|
|
1 & 2 & 0
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
|
|
|
& \implies S^{-1} A S =
|
|
|
|
\begin{pmatrix}
|
|
|
|
3 & 0 & 0 \\
|
|
|
|
0 & -3 & 0 \\
|
|
|
|
0 & 0 & -3
|
|
|
|
\end{pmatrix}
|
2022-05-07 23:36:49 +02:00
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\end{enumerate}
|
|
|
|
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{lemma}
|
|
|
|
\label{theo:2.2.16}
|
2022-04-27 12:36:04 +02:00
|
|
|
Sei $A\in\K^{\nxn}$ und $\underbrace{\spur(A)}_{\mathclap{\color{red}\text{\dq Spur von $A$ \dq}}}
|
2022-04-27 16:18:26 +02:00
|
|
|
:= \sum\limits_{i=1}^n a_{ii}$
|
2022-04-12 12:48:05 +02:00
|
|
|
\[\chi_A(\lambda) = (-1)^n\lambda^n + (-1)^{n-1} \spur(A) \lambda^{n-1} + \cdots + \det(A)\]
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-18 18:20:57 +02:00
|
|
|
\[
|
|
|
|
\chi_A(\lambda) = \sum\limits_{\pi \in S_n} \sgn(\pi) \prod\limits_{i=1}^n \tilde{a}_{i\pi(i)} \text{ mit }
|
2023-01-31 13:30:38 +01:00
|
|
|
\tilde{a}_{ij} =
|
|
|
|
\begin{cases}
|
|
|
|
a_{ij} & i\neq j \\
|
|
|
|
a_{ij} - \lambda & i=j
|
|
|
|
\end{cases}
|
2022-06-18 18:20:57 +02:00
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
Wenn $\pi\neq \id$ gilt $\deg\left(\prod\limits_{i=1}^n
|
|
|
|
\tilde{a}_{i\pi(i)}\right)\le n-2$, da mindestens zwei Elemente vertauscht
|
|
|
|
werden. Die Koeffizienten von Grad $n, n-1$ kann man also aus
|
|
|
|
$\prod\limits_{i=1}^n \tilde{a}_{ii} = \prod\limits_{i=1}^n (a_{ii} - \lambda)$
|
|
|
|
ablesen. Daraus folgt die Behauptung für die höchsten beiden Koeffizienten.
|
|
|
|
Weiters gilt $\chi_A(0)=\det(A)$, was die Aussage für den konstanten
|
|
|
|
Koeffizienten zeigt.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
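
Für $n=2$ lässt sich die Aussage als ergänzende Proberechnung direkt nachprüfen:
% ergänzende Proberechnung für n = 2
\[
\chi_A(\lambda) = \begin{vmatrix} a_{11}-\lambda & a_{12} \\ a_{21} & a_{22}-\lambda \end{vmatrix}
= \lambda^2 - (a_{11}+a_{22})\lambda + (a_{11}a_{22}-a_{12}a_{21})
= (-1)^2\lambda^2 + (-1)^1\spur(A)\lambda + \det(A)
\]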
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{korollar}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $A\sim B \implies \spur(A)=\spur(B)$
|
2022-04-27 12:36:04 +02:00
|
|
|
\item A diagonalisierbar $\implies \spur(A)=\lambda_1 + \cdots + \lambda_n$ mit
|
2022-04-27 16:18:26 +02:00
|
|
|
$\lambda_1, \dots, \lambda_n$ Eigenwerte von $A$, nach Vielfachheit gezählt.
|
2022-04-27 12:36:04 +02:00
|
|
|
\item A diagonalisierbar $\implies \det(A)=\lambda_1 \cdot \dots \cdot \lambda_n$ mit
|
2022-04-27 16:18:26 +02:00
|
|
|
$\lambda_1, \dots, \lambda_n$ Eigenwerte von $A$, nach Vielfachheit gezählt.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{korollar}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Folgt daraus, dass das charakteristische Polynom (und damit seine Koeffizienten) unter Ähnlichkeit
|
|
|
|
invariant sind (Lemma \ref{theo:2.2.7}) und Lemma \ref{theo:2.2.16}
|
|
|
|
\end{proof}
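
Zur Probe mit der (diagonalisierbaren) Matrix aus dem Beispiel zum Diagonalisieren (Eigenwerte $3, -3, -3$):
% Proberechnung mit dem früheren Beispiel
\[
A = \begin{pmatrix} 1 & 2 & 2 \\ 2 & -2 & 1 \\ 2 & 1 & -2 \end{pmatrix}: \quad
\spur(A) = 1 - 2 - 2 = -3 = 3 + (-3) + (-3), \qquad
\det(A) = 27 = 3 \cdot (-3) \cdot (-3)
\]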
|
2022-04-12 12:48:05 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Cayley-Hamilton]
|
2022-05-07 23:36:49 +02:00
|
|
|
\dq$\chi_A(A) = 0$\dq, das heißt sei $A\in \K^{\nxn}$ mit charakteristischem Polynom
|
|
|
|
$\chi_A(\lambda)=c_n \lambda^n + c_{n-1} \lambda^{n-1} + \cdots + c_0$.
|
2022-04-12 12:48:05 +02:00
|
|
|
Dann gilt
|
|
|
|
\[
|
2022-05-07 23:36:49 +02:00
|
|
|
\chi_A(A):=c_n A^n + c_{n-1} A ^{n-1} + \cdots + c_0 I = 0 =
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{pmatrix}
|
|
|
|
0 & \dots & 0 \\
|
|
|
|
\vdots & \ddots & \vdots \\
|
|
|
|
0 & \dots & 0
|
|
|
|
\end{pmatrix}
|
|
|
|
\in \K^{\nxn}
|
2022-04-12 12:48:05 +02:00
|
|
|
\]
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $B := A^T - \lambda I =
|
|
|
|
\begin{pmatrix}
|
|
|
|
a_{11} - \lambda & a_{21} & \dots & a_{n1} \\
|
|
|
|
a_{12} & a_{22} - \lambda & \dots & a_{n2} \\
|
|
|
|
\vdots & \ddots & \ddots & \vdots \\
|
|
|
|
a_{1n} & a_{2n} & \dots & a_{nn} - \lambda
|
|
|
|
\end{pmatrix}
|
|
|
|
= (a_{ji} - \delta_{ij} \lambda)_{ij}$
|
|
|
|
und $C:= \adj(B)$, sodass
|
|
|
|
\begin{equation}
|
2022-06-30 13:04:01 +02:00
|
|
|
CB \overset{\text{\ref{theo:1.4.7}}}{=} \det(B) I_n = \chi_A \cdot I_n \; [\chi_A = \chi_{A^T}]
|
2022-06-08 23:25:28 +02:00
|
|
|
\label{eq:2.2.18.1}
|
|
|
|
\end{equation}
|
|
|
|
\ref{eq:2.2.18.1} heißt komponentenweise, dass
|
|
|
|
\begin{flalign}
|
|
|
|
& \sum_{i=1}^{n}
|
|
|
|
\underbrace{c_{ki}}_{\mathrlap{\text{Polynome, in die $A$ eingesetzt werden kann}}}
|
|
|
|
b_{ij}
= \delta_{jk} \cdot \chi_A \quad \forall k, j \in [n] \nonumber \\
\implies & \sum_{i=1}^{n}c_{ki}(A) b_{ij}(A) = \delta_{jk}\chi_A (A) \label{eq:2.2.18.2}
|
|
|
|
\end{flalign}
|
2022-06-30 13:04:01 +02:00
|
|
|
Wegen $b_{ij}(A) = a_{ji} I_n - \delta_{ij}A$ gilt weiters
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{equation}
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall i \in [n]\colon \sum_{j=1}^{n} b_{ij}(A) e_j = \left(\sum_{j=1}^{n} a_{ji} e_j\right) - A e_i = 0
|
2022-06-08 23:25:28 +02:00
|
|
|
\label{eq:2.2.18.3}
|
|
|
|
\end{equation}
|
2023-03-28 11:46:57 +02:00
|
|
|
Es folgt $\forall k \in [n]\colon$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
2022-06-30 13:04:01 +02:00
|
|
|
\chi_A (A) e_k & = \sum_{j=1}^{n} \delta_{jk} \chi_A(A) e_j & \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \underbrace{=}_{\mathclap{\text{\ref{eq:2.2.18.2}}}}
|
2022-06-15 19:34:43 +02:00
|
|
|
\sum_{j=1}^{n} \sum_{i=1}^{n} c_{ki}(A) b_{ij}(A) e_j & \\
|
2022-06-30 13:04:01 +02:00
|
|
|
& = \sum_{i=1}^{n} c_{ki}(A) \left(\sum_{j=1}^{n} b_{ij}(A) e_j\right) & \\
|
2022-06-15 19:34:43 +02:00
|
|
|
& \underbrace{=}_{\mathclap{\text{\ref{eq:2.2.18.3}}}} 0 & \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \implies \chi_A(A) = 0
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
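
Eine kleine Proberechnung mit frei gewählter $2\times2$-Matrix:
% frei gewähltes Zahlenbeispiel zur Illustration von Cayley-Hamilton
\[
A = \begin{pmatrix} 1 & 2 \\ 3 & 4 \end{pmatrix}, \quad
\chi_A(\lambda) = (1-\lambda)(4-\lambda) - 6 = \lambda^2 - 5\lambda - 2,
\]
\[
\chi_A(A) = A^2 - 5A - 2I
= \begin{pmatrix} 7 & 10 \\ 15 & 22 \end{pmatrix}
- \begin{pmatrix} 5 & 10 \\ 15 & 20 \end{pmatrix}
- \begin{pmatrix} 2 & 0 \\ 0 & 2 \end{pmatrix}
= \begin{pmatrix} 0 & 0 \\ 0 & 0 \end{pmatrix}
\]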
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\subsubsection{Berechnung der Koeffizienten von $\chi_A$}
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $f(\lambda) \underbrace{=}_{\text{(*)}} \prod\limits_{j=1}^{n}(\lambda_j -
|
|
|
|
\lambda) = \underbrace{c_n}_{=(-1)^n}\lambda^n + c_{n-1}\lambda ^{n-1} + \cdots
|
|
|
|
+ c_0$. Wie können wir die $c_j$ effizient bestimmen?
|
2022-06-28 14:39:16 +02:00
|
|
|
\[
|
|
|
|
\sigma_j^n := (-1)^{n-j} \sum\limits_{\substack{S\subseteq [n] \\ \abs{ S } = j}}
|
|
|
|
\prod\limits_{s \in S} \lambda_s
|
|
|
|
\]
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{itemize}
|
|
|
|
\item [Bemerkung 1:] $\displaystyle { c_j = (-1)^{j} \sum_{\substack{S\subseteq [n] \\
|
2022-06-13 10:53:40 +02:00
|
|
|
\abs{ S } = n-j}} \prod_{s \in S} \lambda_s =:
|
2022-04-12 12:48:05 +02:00
|
|
|
\sigma_{n-j}^n (\lambda_1, \dots, \lambda_n)}$ \\
|
|
|
|
Dies folgt aus (*) durch Ausmultiplizieren \\
|
|
|
|
Sei nun weiters $p_j^n(\lambda_1, \dots, \lambda_n) := \sum\limits_{i=1}^{n}\lambda_i^j$
|
|
|
|
\item [Bemerkung 2:] $\sigma_j^n, p_j^n$ sind symmetrisch, das heißt
|
2022-05-12 09:32:32 +02:00
|
|
|
\[
|
|
|
|
\begin{aligned}
|
2022-05-07 23:36:49 +02:00
|
|
|
& \sigma_j^n(\lambda_{\pi(1)}, \dots, \lambda_{\pi(n)}) =
|
|
|
|
\sigma_{j}^n (\lambda_1, \dots, \lambda_n) \\
|
2022-04-12 12:48:05 +02:00
|
|
|
& p_j^n(\lambda_{\pi(1)}, \dots, \lambda_{\pi(n)}) = p_{j}^n (\lambda_1, \dots, \lambda_n)
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{aligned}
|
|
|
|
\text{ für } \pi \in S_n
|
2022-05-12 09:32:32 +02:00
|
|
|
\]
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{itemize}
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{lemma}[Newtonidentität] \label{theo:2.2.19}
|
2022-04-12 12:48:05 +02:00
|
|
|
Es gilt für $k\le n$
|
|
|
|
\[k\sigma_k^n+\sum_{j=0}^{k-1}\sigma_j^n p_{k-j}^n=0\]
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Induktion.
|
|
|
|
\begin{itemize}
|
|
|
|
\item [$k=n$:] Wegen
|
|
|
|
\begin{equation*}
|
|
|
|
0= \sum_{i=1}^{n} f(\lambda_i) =
|
|
|
|
\sum_{i=1}^{n} \sum_{j=0}^n c_j \lambda_i^j =
|
|
|
|
\sum_{j=0}^n c_j p_j^n =
|
|
|
|
\sum_{j=0}^n \sigma_{n-j}^n p_j^n =
|
|
|
|
\sum_{j=0}^n \sigma_j^n p_{n-j}^n
|
|
|
|
\end{equation*}
|
|
|
|
folgt $\sigma_n^n p_0^n + \sum\limits_{j=0}^{n-1} \sigma_j^n p_{n-j}^n = 0$, was mit
|
|
|
|
$p_0^n = n$ die gewünschte Aussage liefert.
|
|
|
|
\item [$k<n$:] Betrachte das (symmetrische) \[
|
|
|
|
q(\lambda_1, \dots, \lambda_n) :=
|
|
|
|
k \sigma_k^n + \sum_{j=0}^{k-1} \sigma_j^n p_{k-j}^n
|
|
|
|
\]
|
|
|
|
Es gilt \[q(\lambda_1, \dots, \lambda_n) =
|
|
|
|
\sum_{j_1, \dots, j_n} c_{j_1 \dots j_n} \lambda_1^{j_1} \cdots \lambda_n^{j_n}\]
|
2023-01-31 13:30:38 +01:00
|
|
|
und wir müssen zeigen, dass alle Koeffizienten $c_{j_1 \dots j_n}=0$ sind. Dazu
|
|
|
|
bemerken wir, dass $c_{j_1 \dots j_n}$ immer $0$ ist, wenn mehr als $k$ $j_i$'s
|
|
|
|
ungleich $0$ sind.\\ Sei also $c_{j_1 \dots j_n}$ ein solcher Koeffizient mit
|
|
|
|
$j_{k+1}, \dots, j_n=0$. Dann gilt
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \underset{\rotatebox{90}{$=$}}
|
|
|
|
{q(\lambda_1, \dots, \lambda_k, 0, \dots, 0)} =
|
|
|
|
\sum_{j_1, \dots, j_k} c_{j_1 \dots j_k 0 \dots 0}
|
|
|
|
\lambda_1^{j_1} \cdots \lambda_k^{j_k} \\
|
|
|
|
& k \sigma_k^k + \sum_{j=0}^{k-1} \sigma_j^k p_{k-j}^k = 0
|
|
|
|
\text{ nach Voraussetzung}
|
|
|
|
\end{align*}
|
|
|
|
Aufgrund der Symmetrie gilt dasselbe Argument für alle anderen Koeffizienten
|
|
|
|
mit höchstens $k$ vielen $j_i$'s ungleich $0$.
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
|
|
|
|
|
|
|
\begin{satz}
|
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
Sei $A \in \K^{\nxn}$ diagonalisierbar. Dann gilt für
|
|
|
|
\begin{align*}
|
|
|
|
\chi_A(\lambda) & = c_{n}\lambda^{n} + c_{n-1} \lambda ^{n-1} + \cdots + c_0 \\
|
|
|
|
& c_n = (-1)^n \\
|
|
|
|
& c_{n-k} = -\frac1k \sum_{j=0}^{k-1} c_{n-j} \spur(A^{k-j})
|
|
|
|
\end{align*}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Folgt direkt aus Lemma \ref{theo:2.2.19} und der Bemerkung, dass für $A$ diagonalisierbar \\
|
|
|
|
$\spur(A^k) = \lambda_1^k + \cdots + \lambda_n^k$ gilt.
|
|
|
|
\end{proof}
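
Als ergänzende Proberechnung wieder die (diagonalisierbare) Matrix aus dem Beispiel zum Diagonalisieren: man rechnet $A^2 = 9I$ nach, also $\spur(A) = -3$, $\spur(A^2) = 27$, $\spur(A^3) = 9\spur(A) = -27$, und damit
% Proberechnung: Koeffizienten von chi_A über Spuren der Potenzen
\begin{align*}
c_3 & = (-1)^3 = -1 \\
c_2 & = -\tfrac11 \left( c_3 \spur(A) \right) = -(-1)(-3) = -3 \\
c_1 & = -\tfrac12 \left( c_3 \spur(A^2) + c_2 \spur(A) \right) = -\tfrac12\left((-1)\cdot27 + (-3)(-3)\right) = 9 \\
c_0 & = -\tfrac13 \left( c_3 \spur(A^3) + c_2 \spur(A^2) + c_1 \spur(A) \right)
= -\tfrac13\left((-1)(-27) + (-3)\cdot27 + 9\cdot(-3)\right) = 27
\end{align*}
in Übereinstimmung mit $\chi_A(\lambda) = -\lambda^3 - 3\lambda^2 + 9\lambda + 27$.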
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
$\underset{\mathrlap{\text{\dq fast alle Matrizen sind diagonalisierbar\dq}}}
|
|
|
|
{\text{Gilt}}$ auch für nicht diagonalisierbares $A$. \dq Beweis\dq: Stetigkeit.
|
|
|
|
|
|
|
|
\subsubsection{Triangulierbarkeit von Matrizen}
|
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-04-28 10:33:22 +02:00
|
|
|
\item $\alpha \in \homkv, \dim(V)=n$ heißt \underline{triangulierbar} wenn es eine Basis $B$ gibt,
|
2022-04-12 12:48:05 +02:00
|
|
|
sodass ${}_B M(\alpha)_B$ obere Dreiecksgestalt hat.
|
|
|
|
\item $A\in\K^{\nxn}$ heißt \underline{triangulierbar} wenn es eine reguläre Matrix $P\in\K^{\nxn}$ gibt,
|
|
|
|
mit $P^{-1} A P$ obere Dreiecksgestalt.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
$A \in \K^{\nxn} / \alpha$ ist triangulierbar $\iff \chi_A / \chi_\alpha$ zerfällt in Linearfaktoren.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{satz}
|
|
|
|
\begin{proof}
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$\implies$:] $\chi_A$ ist invariant
|
2022-06-09 11:00:51 +02:00
|
|
|
bezüglich Ähnlichkeitsumformung (Lemma \ref{theo:2.2.7}). \\
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $P^{-1} A P =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 & & * \\
|
|
|
|
& \ddots & \\
|
|
|
|
0 & & \lambda_n
|
|
|
|
\end{pmatrix}
|
|
|
|
$
|
2022-06-08 23:25:28 +02:00
|
|
|
, dann folgt\\
|
|
|
|
$\chi_A(\lambda) = \chi_{P^{-1} A P}(\lambda)
|
|
|
|
= \prod\limits_{i=1}^n (\lambda_i - \lambda) $
|
|
|
|
\item[$\impliedby$:] Induktion nach $n$
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$n=1$:] Jede $1\times1$ Matrix ist obere Dreiecksmatrix.
|
|
|
|
\item[$n-1\mapsto n$:] Sei $\chi_A(\lambda) =
|
|
|
|
\prod\limits_{i=1}^n (\lambda_i - \lambda)$ und sei
|
|
|
|
$b_1 \in \eig_{\lambda_1}(\alpha)$.
|
|
|
|
Sei $B=(b_1, \dots, b_n)$ Basis von $\K^n$. Dann gilt
|
2023-01-31 13:30:38 +01:00
|
|
|
\[
|
|
|
|
\begin{aligned}
|
2022-06-08 23:25:28 +02:00
|
|
|
& A
|
|
|
|
= {}_B M(\alpha)_B
|
2023-01-31 13:30:38 +01:00
|
|
|
=
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 & a_{12} & \dots & a_{1n} \\
|
|
|
|
0 & \tl & & \\
|
|
|
|
\vdots & & \tilde{A} & \\
|
|
|
|
0 & & & \br
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
2023-03-28 11:46:57 +02:00
|
|
|
& \text{Sei }\beta\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
\overbrace{\linspan {b_2, \dots, b_n}}^{\tilde{V}}
& \to \linspan{ b_2, \dots, b_n} \\
b_i
& \mapsto \alpha(b_i) - a_{1i} b_1
|
|
|
|
\end{cases}
|
|
|
|
\end{aligned}
|
|
|
|
\]
|
|
|
|
Es gilt $\chi_\alpha(\lambda) = (\lambda_1 - \lambda) \cdot
|
|
|
|
\chi_\beta(\lambda)$, daher zerfällt $\chi_\beta$ in Linearfaktoren. Nach
|
|
|
|
Induktionsvoraussetzung existiert eine Basis $\tilde{B} = (\tilde{b}_2, \dots,
|
|
|
|
\tilde{b}_n)$ von $\tilde{V}$ mit
|
|
|
|
\begin{equation}{}_{\tilde{B}} M(\beta)_{\tilde{B}} =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_2 & & * \\
|
|
|
|
& \ddots & \\
|
|
|
|
0 & & \lambda_n
|
|
|
|
\end{pmatrix}
|
2022-06-08 23:25:28 +02:00
|
|
|
\label{eq:2.2.22.1}
|
|
|
|
\end{equation}
|
|
|
|
Weiters ist $\alpha(b_i) = a_{1i} b_1 + \beta(b_i), i=2, \dots, n$.
|
|
|
|
Sei $\tilde{b}_i = \sum\limits_{j=2}^n \mu_{ij} b_j$.
|
|
|
|
Wegen \ref{eq:2.2.22.1} gilt
|
|
|
|
\begin{equation}
|
|
|
|
\beta(\tilde{b}_i) \in
|
2022-06-13 11:29:12 +02:00
|
|
|
\linspan{ \tilde{b}_2, \dots, \tilde{b}_i }
|
2022-06-08 23:25:28 +02:00
|
|
|
\label{eq:2.2.22.2}
|
|
|
|
\end{equation}
|
|
|
|
Wir zeigen nun, dass für die Basis $C=(c_1, \dots, c_n)$ mit
|
|
|
|
$c_1 = b_1, c_2 = \tilde{b}_2, \dots, c_n = \tilde{b}_n $
|
|
|
|
die Matrix ${}_C M(\alpha)_C$ obere Dreiecksgestalt hat.
|
|
|
|
Dies ist äquivalent zu
|
2022-06-13 11:29:12 +02:00
|
|
|
\[\alpha(c_i)\in \linspan{ c_1, \dots, c_i } \quad \forall i=1, \dots, n \]
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{itemize}
|
|
|
|
\item [$i=1$:] $\alpha(c_1) = \alpha(b_1) = \lambda_1 b_1
|
2022-06-13 11:29:12 +02:00
|
|
|
\in \linspan{ b_1 } = \linspan{c_1}$
|
2022-06-08 23:25:28 +02:00
|
|
|
\item [$i>1$:]
|
|
|
|
\begin{align*}
|
|
|
|
& \alpha(c_i) = \alpha(\tilde{b}_i) =
|
2022-06-15 19:34:43 +02:00
|
|
|
\alpha\left(\sum_{j=2}^n \mu_{ij} b_j\right)
|
2022-06-08 23:25:28 +02:00
|
|
|
= \sum_{j=2}^n \mu_{ij} \alpha(b_j) \\
|
|
|
|
& = \sum_{j=2}^n\mu_{ij}(a_{1j} b_1 + \beta(b_j))
|
2022-06-15 19:34:43 +02:00
|
|
|
= \underbrace{\left(\sum_{j=2}^n \mu_{ij} a_{1j}\right)}_
|
|
|
|
{\displaystyle\sigma_i}
|
2022-06-08 23:25:28 +02:00
|
|
|
b_1 + \sum_{j=2}^n \mu_{ij}\beta(b_j) \\
|
|
|
|
& = \sigma_i b_1+ \beta(\sum_{j=2}^n \mu_{ij} b_j)
|
|
|
|
= \sigma_i b_1 + \beta(\tilde{b}_i) \\
|
|
|
|
& \underbrace{\in}_{\text{\ref{eq:2.2.22.2}}}
|
2022-06-13 11:29:12 +02:00
|
|
|
\linspan{ b_1,\tilde{b}_2,\dots,\tilde{b}_i}
|
|
|
|
= \linspan{ c_1, \dots, c_i }
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
|
|
|
|
\end{itemize}
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
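
Ein kleines, frei gewähltes Beispiel zum Triangulieren einer nicht diagonalisierbaren Matrix:
% frei gewähltes Zahlenbeispiel: Triangulieren
\[
A = \begin{pmatrix} 0 & -1 \\ 1 & 2 \end{pmatrix}, \quad
\chi_A(\lambda) = (1-\lambda)^2, \quad
\eig_A(1) = \ker\begin{pmatrix} -1 & -1 \\ 1 & 1 \end{pmatrix} = \linspan{\begin{pmatrix} 1 \\ -1 \end{pmatrix}}
\]
Mit $b_1 = (1, -1)^T$, $b_2 = (0, 1)^T$ und $P = \begin{pmatrix} 1 & 0 \\ -1 & 1 \end{pmatrix}$ folgt
\[
P^{-1} A P = \begin{pmatrix} 1 & 0 \\ 1 & 1 \end{pmatrix}
\begin{pmatrix} 0 & -1 \\ 1 & 2 \end{pmatrix}
\begin{pmatrix} 1 & 0 \\ -1 & 1 \end{pmatrix}
= \begin{pmatrix} 1 & -1 \\ 0 & 1 \end{pmatrix},
\]
eine obere Dreiecksmatrix (wegen $m_g(1) = 1 < 2 = m_a(1)$ ist $A$ nicht diagonalisierbar).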
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\section{Jordan Normalform}
|
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Eine $m\times m$ Matrix
|
2023-01-31 13:30:38 +01:00
|
|
|
\[J_m(\lambda) :=
|
|
|
|
\begin{pmatrix}
|
2022-04-12 12:48:05 +02:00
|
|
|
\lambda & 1 & 0 & \dots & 0 \\
|
|
|
|
0 & \ddots & \ddots & \ddots & \vdots \\
|
|
|
|
\vdots & \ddots & \ddots & \ddots & 0 \\
|
|
|
|
\vdots & \ddots & \ddots & \ddots & 1 \\
|
|
|
|
0 & \dots & \dots & 0 & \lambda
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
\]
|
|
|
|
heißt \underline{Jordanblock} der Dimension $m$ zum Eigenwert $\lambda$.\\ Eine
|
|
|
|
Matrix $A \in \K^{\nxn}$, die als Blockdiagonalmatrix aus Jordanblöcken
|
|
|
|
besteht, heißt \underline{Jordanmatrix}. \\ $A \in \K^{\nxn}$ besitzt eine
|
|
|
|
\underline{Jordan-Normalform} wenn $P\in\K^{\nxn}$ invertierbar existiert,
|
|
|
|
sodass $P^{-1}AP$ Jordanmatrix ist.\\ $\alpha \in \homkv$ besitzt eine
|
|
|
|
\underline{Jordan-Normalform} wenn eine Basis $B$ von $V$ existiert, sodass $
|
|
|
|
{}_{B} M(\alpha)_{B} $ Jordanmatrix ist.\\ B heißt \underline{Jordanbasis} zu
|
|
|
|
$A/\alpha$.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
|
|
|
\begin{itemize}
|
|
|
|
\item Jede Diagonalmatrix ist Jordanmatrix
|
2023-01-31 13:30:38 +01:00
|
|
|
\item $
|
|
|
|
\begin{pmatrix}
|
|
|
|
1
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 \\
|
|
|
|
0 & 1
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\begin{pmatrix}
|
|
|
|
0 & 1 \\
|
|
|
|
0 & 0
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 & 0 \\
|
|
|
|
0 & 1 & 0 \\
|
|
|
|
0 & 0 & 2
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\xcancel{
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 \\
|
|
|
|
0 & 2
|
|
|
|
\end{pmatrix}
|
|
|
|
}$
|
2022-04-28 10:33:22 +02:00
|
|
|
\item \(
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{pmatrix}
|
|
|
|
\tl3\br \\
|
2022-04-28 10:33:22 +02:00
|
|
|
& \tl2 & 1 \\
|
2023-01-31 13:30:38 +01:00
|
|
|
& & 2\br
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
\begin{pmatrix}
|
2022-04-28 10:33:22 +02:00
|
|
|
\tl0 & 1 \\
|
|
|
|
0 & 0\br \\
|
|
|
|
& & \tl-1\br
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
\)
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{itemize}
|
|
|
|
Wir wollen zeigen, dass $\alpha/A$ genau dann eine Jordan-Normalform besitzt, wenn $\alpha/A$ triangulierbar ist.
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
\begin{itemize}
|
|
|
|
\item $\chi_{J_m(\lambda)}(\mu) = (\lambda - \mu)^m \implies \spec(J_m(\lambda)) = \{\lambda\}$ \\
|
2023-01-31 13:30:38 +01:00
|
|
|
$J_m(\lambda) - \lambda I =
|
|
|
|
\begin{pmatrix}
|
2022-04-12 12:48:05 +02:00
|
|
|
0 & 1 & 0 & \dots & 0 \\
|
|
|
|
0 & \ddots & \ddots & \ddots & \vdots \\
|
|
|
|
\vdots & \ddots & \ddots & \ddots & \vdots \\
|
|
|
|
\vdots & & \ddots & \ddots & 1 \\
|
|
|
|
0 & \dots & \dots & 0 & 0
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
$\\
|
2022-04-12 12:48:05 +02:00
|
|
|
$\implies \dim(\eig_{J_m(\lambda)}(\lambda)) = \dim(\ker(J_m(\lambda) - \lambda I)) = 1$ \\
|
|
|
|
$\implies m_g(\lambda) = 1$ und $m_a(\lambda) = m$.
|
|
|
|
|
|
|
|
\item $J_m(0)^m = 0$, das heißt $J_m(0)$ ist \underline{nilpotent}.
|
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
& J_m(0)(e_i) =
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
e_{i-1} & i \in \{2, \dots, m\} \\
|
|
|
|
0 & \text{sonst}
|
|
|
|
\end{cases}
|
|
|
|
\\
|
2023-03-28 11:46:57 +02:00
|
|
|
& J_m(0)^l(e_i) =
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
e_{i-l} & i \in \{l+1, \dots, m\} \\
|
|
|
|
0 & \text{sonst}
|
|
|
|
\end{cases}
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
|
|
|
|
|
|
|
|
\begin{defin}
|
2022-04-28 10:33:22 +02:00
|
|
|
$\alpha \in \homkv$ oder $A\in \K^{\nxn}$ heißt \underline{nilpotent} (mit Index $m$) falls
|
2023-03-28 11:46:57 +02:00
|
|
|
$\alpha^m = 0 / A^m = 0$ und $\forall l \in [m-1]\colon \alpha^l \neq 0 / A^l \neq 0$.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
\label{theo:2.3.3}
|
2022-04-28 10:33:22 +02:00
|
|
|
Sei $\alpha \in \homkv, \dim(V)=n$ nilpotent mit Index $m$. Dann existiert eine Basis $B$ mit
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{equation*}
|
|
|
|
{}_B M(\alpha)_B =
|
|
|
|
\begin{pmatrix}
|
|
|
|
0 & \delta_1 & & \\
|
|
|
|
& \ddots & \ddots & \\
|
|
|
|
& & \ddots & \delta_{n-1} \\
|
|
|
|
& & & 0
|
|
|
|
\end{pmatrix}
|
|
|
|
\text{ und } \delta_i \in \{0, 1\} \forall i \in [n-1]
|
|
|
|
\end{equation*}
|
|
|
|
Das heißt ${}_B M(\alpha)_B$ ist blockdiagonal mit Jordanblöcken mit Eigenwerten $0$
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $V_i := \ker(\alpha^i)$. \\
|
|
|
|
Dies ergibt eine aufsteigende Kette von Unterräumen
|
|
|
|
\begin{equation*}
|
|
|
|
\underbrace{\{0\}}_{=V_0} \subseteq V_1 \subseteq \cdots \subseteq \underbrace{V_m}_{=V}
|
|
|
|
\end{equation*}
|
|
|
|
Wir bauen uns iterativ eine Basis für Komplemente $W_i$ mit $V_{i-1} \oplus W_i = V_i$.
|
|
|
|
Sei also $B^{m-1}$ Basis von $V_{m-1}$. \\
|
2022-06-30 13:04:01 +02:00
|
|
|
$C^m = (c_1^m, \dots, c_{r_{m}}^m)$ Basis von $W_m$
|
2022-06-08 23:25:28 +02:00
|
|
|
[das heißt $C^m$ ergänzt die Basis $B^{m-1}$ zu einer Basis von $V_m$]. \\
|
|
|
|
\underline{Behauptung}
|
|
|
|
\begin{enumerate} [label=\arabic*)]
|
|
|
|
\item $\alpha(C^m) \subseteq V_{m-1}$
|
|
|
|
\item $\alpha(C^m)$ linear unabhängig
|
2022-06-13 11:29:12 +02:00
|
|
|
\item $\linspan{ \alpha(C^m) } \cap V_{m-2} = \{0\}$
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\begin{proof}[Zwischenbeweis]
|
|
|
|
\begin{itemize}
|
|
|
|
\item[1)] folgt aus $\alpha(V_i) \subseteq V_{i-1}$, denn $\alpha^i(v)=0 \implies \alpha^{i-1}(\alpha(v))=0$
|
|
|
|
\item[3)] Sei $\sum\limits_{i}\mu_i \alpha(c_i^m) \in V_{m-2}$
|
|
|
|
\begin{align*}
|
2022-06-30 13:04:01 +02:00
|
|
|
& \implies \alpha^{m-2} \left(\sum_{i}\mu_i \alpha(c_i^m)\right) = 0 \\
|
|
|
|
& \implies \alpha^{m-1} \left(\sum_{i} \mu_i c_i^m \right) = 0 \\
|
|
|
|
& \implies \sum \mu_i c_i^m \in V_{m-1} \\
|
|
|
|
& \underbrace{\implies}_{\mathclap{\substack{(c_i^m) \text{ liegen} \\
|
|
|
|
\text{im Komplement} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
\text{von } V_{m-1}}}}
|
2022-06-08 23:25:28 +02:00
|
|
|
\mu_i = 0, \forall i \implies \sum_{i} \mu_i \alpha(c_i^m) = 0
|
|
|
|
\end{align*}
|
|
|
|
\item[2)] folgt aus 3) [da $0\in V_{m-2}$]
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
|
|
|
Es folgt, dass
|
|
|
|
\[
|
2022-06-30 13:04:01 +02:00
|
|
|
\underbrace{V_{m-2} \oplus
|
|
|
|
\overset{\linspan{ D^{m-1} }}{\linspan{ \alpha(C^m) } \oplus\linspan{C^{m-1}}}}_{V_{m-1}} \oplus
|
2022-06-13 11:29:12 +02:00
|
|
|
\overset{\linspan{ D^m }}{\linspan{C^m}} = V
|
2022-06-08 23:25:28 +02:00
|
|
|
\]
|
|
|
|
Setze $D^m := C^m$ und definiere induktiv für $D^i \subseteq V_i$ die Menge
|
2023-01-31 13:30:38 +01:00
|
|
|
$D^{i-1} := \alpha(D^i) \cup C^{i-1} \subseteq V_{i-1}$ sodass mit einer Basis
|
|
|
|
$B^{i-2}$ von $V_{i-2}$ die Menge $B^{i-2} \cup D^{i-1}$ Basis von $V_{i-1}$
|
|
|
|
ist, also
|
2022-06-08 23:25:28 +02:00
|
|
|
\[
|
2022-06-13 11:29:12 +02:00
|
|
|
V_{i-2} \oplus \underbrace{\linspan{ \alpha(D^i) } \oplus \linspan {C^{i-1}}}_
|
|
|
|
{\linspan{ D^{i-1} }}
|
2022-06-08 23:25:28 +02:00
|
|
|
= V_{i-1} \text{$\leftarrow$ das geht nach obiger Behauptung}
|
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
Nach Konstruktion ist $(D^1, \dots, D^m)$ Basis von $V$. Sie besteht aus
|
|
|
|
folgenden Elementen:
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
|
|
|
\left.
|
|
|
|
\begin{array}{lll}
|
|
|
|
J_m(0) \to & \alpha^{m-1}(d_1^m), \dots, \alpha(d_1^m), & d_1^m \\
|
|
|
|
& & \vdots \\
|
|
|
|
J_m(0) \to & \alpha^{m-1}(d_{r_m}^m), \dots, \alpha(d_{r_m}^m), & d_{r_m}^m
|
|
|
|
\end{array}
|
|
|
|
\right\} \in V_m \\
|
|
|
|
\left.
|
|
|
|
\begin{array}{lll}
|
2022-06-30 13:04:01 +02:00
|
|
|
J_{m-1}(0) \to & \alpha^{m-2}(d_1^{m-1}), \dots, \alpha(d_1^{m-1}), & d_1^{m-1} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& & \vdots \\
|
2022-06-30 13:04:01 +02:00
|
|
|
J_{m-1}(0) \to & \alpha^{m-2}(d_{r_{m-1}}^{m-1}), \dots, \alpha(d_{r_{m-1}}^{m-1}), & d_{r_{m-1}}^{m-1}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{array}
|
|
|
|
\right\} \in V_{m-1} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
\left.
|
|
|
|
\begin{array}{lr}
|
|
|
|
J_1(0) \to & d_1^1 \\
|
|
|
|
& \vdots \\
|
|
|
|
J_1(0) \to & d_{r_1}^1
|
|
|
|
\end{array}
|
2022-06-08 23:25:28 +02:00
|
|
|
\right\} \in V_1 = \ker(\alpha)
|
|
|
|
\end{align*}
|
|
|
|
Wenn wir die Basiselemente von links nach rechts und von oben nach unten ordnen, erhalten wir
|
|
|
|
die gewünschte Gestalt.
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
2023-03-28 11:46:57 +02:00
|
|
|
Angenommen \(\alpha - \lambda \id\colon V \to V\) nilpotent. Dann besitzt
|
|
|
|
\(\alpha\) nach Lemma \ref{theo:2.3.3} Jordan-Normalform.
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\label{theo:2.3.4}
|
2022-06-30 13:04:01 +02:00
|
|
|
Sei \(V\) \K-Vektorraum, \(\dim(V) < \infty, \alpha \in \homkv\) und \(\lambda \in \spec(\alpha)\).
|
2022-04-12 12:48:05 +02:00
|
|
|
Für \(l \in \mathbb{N}\) definiere \(V_{l, \lambda}:= \ker((\alpha - \lambda \id)^l)\)
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
\begin{itemize}
|
2022-04-28 10:33:22 +02:00
|
|
|
\item $\alpha - \lambda \id|_{V_{l, \lambda}} \in \homk(V_{l, \lambda}, V_{l, \lambda})$:
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{align*}
|
|
|
|
\text{zu Zeigen: } v\in V_{l, \lambda} & \implies \alpha(v) - \lambda v \in V_{l, \lambda}\text{, das heißt} \\
|
|
|
|
(\alpha - \lambda \id)^l v = 0 & \implies (\alpha - \lambda \id)^{l-1} (\alpha - \lambda \id) v = 0 \\
|
|
|
|
& \implies (\alpha - \lambda \id)(v) \in V_{l, \lambda} & \square
|
|
|
|
\end{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Nach Lemma \ref{theo:2.3.3} gibt es also Basis von $V_{l, \lambda}$ bezüglich
|
2023-03-28 11:46:57 +02:00
|
|
|
derer $\alpha - \lambda \id |_{V_{l, \lambda}}\colon V_{l, \lambda} \to V_{l,
|
2023-01-31 13:30:38 +01:00
|
|
|
\lambda}$ Jordan-Normalform hat
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{itemize}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
\label{theo:2.3.5}
|
2022-06-30 13:04:01 +02:00
|
|
|
Sei $V$ \K-Vektorraum, $\dim(V) < \infty, \alpha \in \homkv$. Für $l\in\mathbb{N}$ sei
|
2022-04-12 12:48:05 +02:00
|
|
|
$V_l := \ker(\alpha^l)$. Dann gilt $\alpha(V_l) \subseteq V_{l-1} \subseteq V_l$ für alle
|
|
|
|
$l\in \mathbb{N}$ und es existiert genau ein $k\in \mathbb{N}_0$ mit
|
|
|
|
\[
|
2022-05-07 23:36:49 +02:00
|
|
|
\{0\} = V_0 \subseteq V_1 \subseteq \cdots \subseteq V_k = V_{k+1} \text{ und } V_{l+1} =
|
|
|
|
V_l, \forall l \ge k
|
2022-04-12 12:48:05 +02:00
|
|
|
\]
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Da $\dim(V) < \infty$ muss es ein kleinstes $k$ mit $V_{k+1} = V_{k}$ geben.
|
|
|
|
Angenommen $\exists l\ge k$ mit $V_{l+1} \neq V_l$. Sei $0\neq v\in V_{l+1} \setminus V_l$
|
|
|
|
$\implies 0 = \alpha^{l+1}(v) = \alpha^{k+1}(\alpha^{l-k}(v))$ und $0\neq \alpha^l(v)
|
|
|
|
= \alpha^k (\alpha^{l-k}(v)) \implies 0\neq \alpha^{l-k}(v) \in V_{k+1}\setminus V_k$
|
2023-01-04 23:18:29 +01:00
|
|
|
\qed
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
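
Zur Veranschaulichung der Kette am frei gewählten Beispiel $\alpha = J_3(0)$:
% frei gewähltes Beispiel zur Kette der Kerne
\[
V_1 = \ker(J_3(0)) = \linspan{e_1} \subsetneq
V_2 = \ker(J_3(0)^2) = \linspan{e_1, e_2} \subsetneq
V_3 = \ker(J_3(0)^3) = \K^3 = V_4 = \cdots,
\]
also $k = 3$.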
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Sei $V_{l, \lambda}$ wie in Definition \ref{theo:2.3.4} und $k$ wie in Lemma \ref{theo:2.3.5}.
|
|
|
|
Dann heißt \[
|
|
|
|
\widetilde{\eig_\alpha(\lambda)} := V_{k, \lambda} = V_{k+1, \lambda}
|
|
|
|
\]
|
|
|
|
\underline{verallgemeinerter Eigenraum} oder \underline{Hauptraum} von $\alpha$ zum Eigenwert
|
|
|
|
$\lambda$. $v \in V_{l, \lambda} \setminus V_{l-1, \lambda}$ für $1 \le l \le k$ heißt
|
|
|
|
\underline{verallgemeinerter Eigenvektor} der Ordnung $l$.
|
|
|
|
\end{defin}
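
Ein kleines, frei gewähltes Beispiel: für einen einzelnen Jordanblock ist der Hauptraum echt größer als der Eigenraum.
% frei gewähltes Beispiel: Eigenraum vs. Hauptraum
\[
A = J_2(\lambda) = \begin{pmatrix} \lambda & 1 \\ 0 & \lambda \end{pmatrix}: \quad
\eig_A(\lambda) = \ker(A - \lambda I) = \linspan{e_1}, \qquad
\widetilde{\eig_A(\lambda)} = \ker\left((A - \lambda I)^2\right) = \K^2,
\]
und $e_2$ ist verallgemeinerter Eigenvektor der Ordnung $2$.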
|
|
|
|
|
|
|
|
\subsubsection{Idee}
|
|
|
|
\begin{itemize}
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\alpha|_{\widetilde{\eig_\alpha(\lambda)}}\colon \widetilde{\eig_\alpha(\lambda)} \to
|
2022-05-07 23:36:49 +02:00
|
|
|
\widetilde{\eig_\alpha(\lambda)}$
|
2022-04-12 12:48:05 +02:00
|
|
|
hat Jordan-Normalform.
|
2023-01-31 13:30:38 +01:00
|
|
|
Zerlege
|
|
|
|
\begin{equation}
|
2022-04-12 12:48:05 +02:00
|
|
|
\label{eq:2.3.6.1}
|
|
|
|
V:= \widetilde{\eig_\alpha(\lambda_1)} \oplus \cdots \oplus \widetilde{\eig_\alpha(\lambda_r)}
|
|
|
|
\end{equation}
|
2023-03-28 11:46:57 +02:00
|
|
|
dann besitzt ganz $\alpha\colon V\to V$ Jordan-Normalform
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $V= V_1 \oplus \cdots \oplus V_r$ und $\alpha \in \homkv$. Falls
|
|
|
|
$\alpha(V_i) \subseteq V_i$ für alle $i \in [r]$, dann schreiben wir $\alpha =
|
|
|
|
\alpha_1 \oplus \cdots \oplus \alpha_r$ mit $\alpha_i = \alpha|_{V_i} \forall i
|
|
|
|
\in [r]$. Für $v= v_1 + \cdots + v_r, v_i \in V_i, \forall i \in [r]$ gilt also
|
|
|
|
$\alpha(v) = \alpha_1(v_1) + \cdots + \alpha_r(v_r)$. Sei $B_i = \{b_1^i,
|
|
|
|
\dots, b_{d_i}^i\}$ Basis von $V_i$ und $B = (B_1, \dots, B_r)$. Dann hat ${}_B
|
|
|
|
M(\alpha)_B$ Blockdiagonalgestalt mit Blöcken ${}_{B_i} M(\alpha_i)_{B_i}$, das
|
|
|
|
heißt
|
2022-05-07 23:36:49 +02:00
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
{}_B M(\alpha)_B =
|
|
|
|
\begin{pmatrix}
|
2022-04-12 12:48:05 +02:00
|
|
|
\overbrace{{}_{B_1} M(\alpha_1)_{B_1}}^{\in \K^{d_1 \times d_1}} & & 0 \\
|
|
|
|
& \ddots & \\
|
|
|
|
0 & & \underbrace{{}_{B_r} M(\alpha_r)_{B_r}}_{\in\K^{d_r \times d_r}}
|
|
|
|
\end{pmatrix}
|
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
Insbesondere gilt $\chi_\alpha = \chi_{\alpha_1} \cdots
|
|
|
|
\chi_{\alpha_r}$
|
|
|
|
\item Da wir schon wissen, dass $\alpha|_{\widetilde{\eig_\alpha(\lambda_i)}}$
|
|
|
|
Jordan-Normalform hat folgt \\ Jordan-Normalform für $\alpha$ wenn
|
|
|
|
\ref{eq:2.3.6.1} gezeigt werden kann.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{itemize}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-04-12 12:48:05 +02:00
|
|
|
\label{theo:2.3.7}
|
2022-04-28 10:33:22 +02:00
|
|
|
Sei $V$ ein $\K$-Vektorraum mit $\dim(V) < \infty$ und $\alpha \in \homkv$ sodass
|
2022-04-12 12:48:05 +02:00
|
|
|
$\chi_\alpha(\lambda) = (\lambda_1 - \lambda) \cdots (\lambda_r - \lambda)$ in Linearfaktoren
|
|
|
|
zerfällt. Dann gilt
|
|
|
|
$V = \widetilde{\eig_\alpha(\lambda_1)} \oplus \cdots \oplus \widetilde{\eig_\alpha(\lambda_r)}$
|
2022-04-22 10:56:47 +02:00
|
|
|
und insbesondere $\alpha = \alpha_1 \oplus \cdots \oplus \alpha_r$ mit
|
2022-05-07 23:36:49 +02:00
|
|
|
$\alpha_i := \alpha|_{\widetilde{\eig_\alpha(\lambda_i)}} \in \homk(\widetilde{\eig_\alpha(\lambda_i)},
|
|
|
|
\widetilde{\eig_\alpha(\lambda_i)})$
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{satz}
|
|
|
|
\begin{proof}
|
|
|
|
Induktion nach $\dim(V)$.
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$n=1$:] \checkmark
|
|
|
|
\item[$n-1 \mapsto n$:] Da $\chi_\alpha$ in Linearfaktoren zerfällt, besitzt es eine Nullstelle
|
|
|
|
$\lambda \in \spec(\alpha)$.
|
|
|
|
\begin{enumerate}[label=Fall \arabic*:]
|
|
|
|
\item $\widetilde{\eig_\alpha(\lambda)} = V$ \\
|
|
|
|
\underline{Behauptung:} $\spec(\alpha) = \{\lambda\}$
|
|
|
|
\begin{proof}[Zwischenbeweis]
|
|
|
|
Angenommen $\lambda' \neq \lambda$ und $\lambda' \in \spec(\alpha)$ und
|
|
|
|
$v\in \eig_\alpha(\lambda') \setminus \{0\}$. \\
|
|
|
|
$\implies (\alpha - \lambda \id) (v) = \alpha(v) - \lambda'v + (\lambda' - \lambda) v
|
|
|
|
= (\lambda' - \lambda)(v)$ \\
|
|
|
|
$\implies (\alpha - \lambda \id)^l (v) \neq 0,\forall l \in \mathbb{N}$\Lightning \\
|
|
|
|
Daraus folgt das gewünschte Resultat
|
|
|
|
\end{proof}
|
|
|
|
\item $\widetilde{\eig_\alpha(\lambda)} \neq V$. Sei $k$ minimal mit
|
|
|
|
$\ker(\alpha - \lambda \id)^k = \widetilde{\eig_\alpha(\lambda)}$ [Lemma \ref{theo:2.3.5}]
|
|
|
|
Setze $V_1 := \widetilde{\eig_\alpha(\lambda)}, V_2 := \im(\alpha - \lambda \id)^k$. \\
|
|
|
|
\underline{Behauptung:}
|
|
|
|
\begin{enumerate}[label=(\roman*)]
|
|
|
|
\item $\alpha(V_i) \subseteq V_i, i \in \{1, 2\}$
|
|
|
|
\item $V = V_1 \oplus V_2$
|
|
|
|
\end{enumerate}
|
|
|
|
\begin{proof}[Zwischenbeweis]
|
2022-04-12 12:48:05 +02:00
|
|
|
\begin{enumerate}[label=(\roman*)]
|
2022-06-08 23:25:28 +02:00
|
|
|
\item Wir zeigen $(\alpha - \lambda \id)(V_i) \subseteq V_i$.
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$i=1$:] Sei $v\in V_1 = \ker(\alpha - \lambda \id)^k$. Dann gilt klarerweise
|
|
|
|
$(\alpha - \lambda \id)(v) \in \ker(\alpha - \lambda \id)^k \checkmark$
|
|
|
|
\item[$i=2$:] Sei $v \in \im(\alpha - \lambda \id)^k$, also
|
|
|
|
$v = (\alpha - \lambda \id)^k (w)$
|
|
|
|
$\implies (\alpha - \lambda \id)(v)
|
|
|
|
= (\alpha - \lambda \id)^k (\alpha - \lambda \id)(w) \in
|
|
|
|
\im(\alpha - \lambda \id)^k \checkmark$
|
|
|
|
\end{itemize}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Es gilt $\dim(V) = \dim(V_1) + \dim(V_2)$ nach der Dimensionsformel. Es genügt
|
|
|
|
also zu zeigen, dass $V_1 \cap V_2 = \{0\}$. \\ Sei $v\in V_1 \cap V_2$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
& \underbrace{\implies}_{v\in V_2} \exists w\in V\colon v = (\alpha - \lambda \id)^k(w) \\
|
|
|
|
& \underbrace{\implies}_{v\in V_1} (\alpha - \lambda \id)^{2k}(w) = 0 \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \implies w \in V_{2k, \lambda} \underbrace{=}_{\mathclap{\text{Lemma \ref{theo:2.3.5}}}} V_{k, \lambda}
\implies v = (\alpha - \lambda \id)^k(w) = 0\checkmark
|
|
|
|
\end{align*}
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{enumerate}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
|
|
|
|
\end{enumerate}
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Es folgt $V = \underbrace{\widetilde{\eig(\lambda)}}_{V_1} \oplus V_2,
|
|
|
|
\dim(V_2) < n$ und \\ $\alpha = \alpha_1 \oplus \alpha_2, \alpha_i :=
|
|
|
|
\alpha|_{V_i}, i\in\{1, 2\}$. Es folgt $\chi_\alpha = \chi_{\alpha_1} \cdot
|
|
|
|
\chi_{\alpha_2}$, also zerfällt $\chi_{\alpha_2}$ in Linearfaktoren. Daher
|
|
|
|
können wir die Induktionsvoraussetzung anwenden, was das gewünschte Resultat
|
|
|
|
liefert.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $V$ \K-Vektorraum, $\dim(V) < \infty$ und $\alpha \in \homkv$ sodass
|
|
|
|
$\chi_\alpha$ in Linearfaktoren zerfällt. Dann besitzt $\alpha$ Jordan-Normalform.
|
2022-04-12 12:48:05 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Zerlege nach Satz \ref{theo:2.3.7}
|
|
|
|
$V = \widetilde{\eig_\alpha(\lambda_1)} \oplus \cdots \oplus \widetilde{\eig_\alpha(\lambda_r)}$
|
|
|
|
und \\
|
2022-06-30 13:04:01 +02:00
|
|
|
$\alpha = \alpha_1 \oplus \cdots \oplus \alpha_r$.
|
2022-06-08 23:25:28 +02:00
|
|
|
Da $\widetilde{\eig_\alpha(\lambda_i)} = \ker(\alpha - \lambda_i \id)^{k_i}$ ist
|
|
|
|
$\alpha_i - \lambda_i \id := \alpha|_{\widetilde{\eig_\alpha(\lambda_i)}} - \lambda_i
|
|
|
|
\id|_{\widetilde{\eig_\alpha(\lambda_i)}} $ nilpotent. Nach Lemma \ref{theo:2.3.3} gibt es eine Basis
|
|
|
|
$B_i$ von $\widetilde{\eig_\alpha(\lambda_i)}$ sodass ${}_{B_i} M(\alpha_i)_{B_i}$ Jordan-Normalform
|
2023-01-31 13:30:38 +01:00
|
|
|
hat. Es folgt mit $B= (B_1, \dots, B_r)$ dass \\ ${}_B M(\alpha)_B =
|
|
|
|
\begin{pmatrix}{}_{B_1} M(\alpha_1)_{B_1} & & \\
|
|
|
|
& \ddots & \\
|
|
|
|
& & {}_{B_r}M(\alpha_r)_{B_r}
|
|
|
|
\end{pmatrix}
|
|
|
|
$ Jordanmatrix ist.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
|
2022-04-12 12:48:05 +02:00
|
|
|
|
2022-04-27 12:36:04 +02:00
|
|
|
\subsubsection{Berechnung der Jordan-Normalform}
|
|
|
|
\begin{enumerate}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Berechne $\spec(\alpha) = \{ \lambda_1, \dots, \lambda_r \}$ \item
|
2022-04-27 16:18:26 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Haupträume berechnen: Finde $k$ minimal mit \[
|
|
|
|
\ker(\alpha - \lambda \id)^{k+1} = \ker(\alpha - \lambda \id)^k =: V_\lambda
|
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Für $1 \le l \le k$ bestimme $B_l = \{ b_1^l, \dots, b_{r_l}^l\}$, sodass
|
|
|
|
$(B_1, \dots, B_l)$ Basis von $\ker(\alpha - \lambda \id)^l$.
|
2022-04-27 16:18:26 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\item
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Setze zunächst $v_i^k = b_i^k, i = 1, \dots, r_k$. $D_k := (v_1^k, \dots,
|
|
|
|
v_{r_k}^k)$ \\ Setze $v_i^{k-1} := (\alpha - \lambda \id)(v_i^k) \in \linspan{
|
|
|
|
B_{k-1} }, i = 1, \dots, r_k$ \\ Ergänze gegebenenfalls $(v_1^{k-1}, \dots,
|
|
|
|
v_{r_k}^{k-1}, v_{r_k+1}^{k-1}, \dots, v_{r_{k-1}}^{k-1})=:D_{k-1}$, sodass
|
|
|
|
\\ $\linspan{ D_{k-1} } = \linspan {B_{k-1}}$
|
|
|
|
\item Führe 3a) iterativ aus. \\ Setze $v_i^{l-1} := (\alpha - \lambda \id)(v_i^l), i
|
|
|
|
= 1, \dots, r_l$ \\ Ergänze gegebenenfalls $v_1^{l-1}, \dots, v_{r_l}^{l-1},
|
|
|
|
v_{r_l+1}^{l-1}, \dots, v_{r_{l-1}}^{l-1} =:D_{l-1}$, sodass $\linspan{
|
|
|
|
D_{l-1} } = \linspan{B_{l-1}}$
|
2022-04-27 16:18:26 +02:00
|
|
|
\end{enumerate}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $B_\lambda = (D_1, \dots, D_k) \implies {}_{B_\lambda}
|
|
|
|
M(\alpha|_{v_\lambda})_{B_\lambda}$ hat Jordan-Normalform mit Eigenwert
|
|
|
|
$\lambda$.
|
|
|
|
\item Setze $B = (B_{\lambda_1}, \dots, B_{\lambda_r}) \implies {}_B M(\alpha)_B$ hat
|
|
|
|
Jordan-Normalform.
|
2022-04-27 16:18:26 +02:00
|
|
|
\end{enumerate}
|
2022-04-27 12:36:04 +02:00
|
|
|
|
2022-04-27 23:03:56 +02:00
|
|
|
\subsubsection{Beispiel}
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& A=
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 0 & 2 & 3 & 4 \\
|
|
|
|
0 & 1 & 0 & -2 & -3 \\
|
|
|
|
0 & 0 & 1 & 0 & 2 \\
|
|
|
|
0 & 0 & 0 & 1 & -1 \\
|
|
|
|
0 & 0 & 0 & 0 & 1
|
|
|
|
\end{pmatrix}
|
2022-06-13 11:29:12 +02:00
|
|
|
, \chi_A(\lambda) = (\lambda - 1)^5 \\
|
2023-01-31 13:30:38 +01:00
|
|
|
& (A - 1\cdot I) =
|
|
|
|
\begin{pmatrix}
|
|
|
|
0 & 0 & 2 & 3 & 4 \\
|
|
|
|
0 & 0 & 0 & -2 & -3 \\
|
|
|
|
0 & 0 & 0 & 0 & 2 \\
|
|
|
|
0 & 0 & 0 & 0 & -1 \\
|
|
|
|
0 & 0 & 0 & 0 & 0
|
|
|
|
\end{pmatrix}
|
2022-06-13 11:29:12 +02:00
|
|
|
\implies \ker(A - I) = \linspan{ ( \underbrace{e_1, e_2}_{B_1} ) } \\
|
2023-01-31 13:30:38 +01:00
|
|
|
& (A-I)^2 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
0 & 0 & 0 & 0 & 1 \\
|
|
|
|
0 & 0 & 0 & 0 & 2 \\
|
|
|
|
0 & 0 & 0 & 0 & 0 \\
|
|
|
|
0 & 0 & 0 & 0 & 0 \\
|
|
|
|
0 & 0 & 0 & 0 & 0
|
|
|
|
\end{pmatrix}
|
2022-06-13 11:29:12 +02:00
|
|
|
\implies \ker((A-I)^2) = \linspan{ (\underbrace{e_1, e_2}_{B_1}, \underbrace{e_3, e_4}_{B_2}) } \\
|
2022-04-28 10:33:22 +02:00
|
|
|
& (A-I)^3 = 0 \implies \ker((A-I)^3) =
|
2022-06-13 11:29:12 +02:00
|
|
|
\linspan{(\underbrace{e_1, e_2}_{B_1}, \underbrace{e_3, e_4}_{B_2}, \underbrace{e_5}_{B_3}) } \\
|
2022-04-28 10:33:22 +02:00
|
|
|
& B_1 = (e_1, e_2), B_2 = (e_3, e_4), B_3 = (e_5)
|
2022-04-27 23:03:56 +02:00
|
|
|
\end{align*}
|
2022-04-28 10:33:22 +02:00
|
|
|
\begin{align*}
|
|
|
|
\begin{rcases}
|
|
|
|
v_1^3 = e_5
|
|
|
|
\end{rcases}
|
|
|
|
D_3 \\
|
|
|
|
\begin{rcases}
|
2023-01-31 13:30:38 +01:00
|
|
|
v_1^2 = (A-1I)(v_1^3) =
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 \\
|
|
|
|
-3 \\
|
|
|
|
2 \\
|
|
|
|
-1 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
2022-04-28 10:33:22 +02:00
|
|
|
v_2^2 = e_4
|
|
|
|
\end{rcases}
|
|
|
|
D_2 \\
|
|
|
|
\begin{rcases}
|
2023-01-31 13:30:38 +01:00
|
|
|
v_1^1 = (A - I)(v_1^2) =
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 \\
|
|
|
|
2 \\
|
|
|
|
0 \\
|
|
|
|
0 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
|
|
|
v_2^1 = (A - I)(v_2^2) =
|
|
|
|
\begin{pmatrix}
|
|
|
|
3 \\
|
|
|
|
-2 \\
|
|
|
|
0 \\
|
|
|
|
0 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{rcases}
|
|
|
|
D_1
|
2022-04-27 23:03:56 +02:00
|
|
|
\end{align*}
|
|
|
|
\begin{align*}
|
2022-04-28 10:33:22 +02:00
|
|
|
(\overset{\mathrlap{\rotatebox{30}{\scriptsize$\in\ker(A-I)$}}}{v_1^1}
|
2023-01-31 13:30:38 +01:00
|
|
|
\underset{\mathclap{\substack{\rotatebox{180}{$\curvearrowright$} \\
|
|
|
|
A-I}}}{,}
|
2022-04-28 10:33:22 +02:00
|
|
|
v_1^2
|
2023-01-31 13:30:38 +01:00
|
|
|
\underset{\mathclap{\substack{\rotatebox{180}{$\curvearrowright$} \\
|
|
|
|
A-I}}}{,}
|
2022-04-28 10:33:22 +02:00
|
|
|
v_1^3,
|
|
|
|
\overset{\mathrlap{\rotatebox{30}{\scriptsize$\in\ker(A-I)$}}}{v_2^1}
|
2023-01-31 13:30:38 +01:00
|
|
|
\underset{\mathclap{\substack{\rotatebox{180}{$\curvearrowright$} \\
|
|
|
|
A-I}}}{,}
|
2022-04-28 10:33:22 +02:00
|
|
|
v_2^2) = B, {}_B M(A)_B =
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 & 0 & 0 & 0 \\
|
|
|
|
0 & 1 & 1 & 0 & 0 \\
|
|
|
|
0 & 0 & 1 & 0 & 0 \\
|
|
|
|
0 & 0 & 0 & 1 & 1 \\
|
|
|
|
0 & 0 & 0 & 0 & 1
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
2022-04-28 10:33:22 +02:00
|
|
|
P =
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 4 & 0 & 3 & 0 \\
|
|
|
|
2 & -3 & 0 & -2 & 0 \\
|
|
|
|
0 & 2 & 0 & 0 & 0 \\
|
|
|
|
0 & -1 & 0 & 0 & 1 \\
|
|
|
|
0 & 0 & 1 & 0 & 0
|
|
|
|
\end{pmatrix}
|
|
|
|
\implies P^{-1} A P =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\tl1 & 1 & 0 & 0 & 0 \\
|
|
|
|
0 & 1 & 1 & 0 & 0 \\
|
|
|
|
0 & 0 & 1\br & 0 & 0 \\
|
|
|
|
0 & 0 & 0 & \tl1 & 1 \\
|
|
|
|
0 & 0 & 0 & 0 & 1\br
|
|
|
|
\end{pmatrix}
|
2022-04-27 23:03:56 +02:00
|
|
|
\end{align*}
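Zur Kontrolle kann man $P^{-1}AP$ auch numerisch nachrechnen. Eine kleine
Python-Skizze dazu (unter der Annahme, dass NumPy verfügbar ist; rein zur Probe):
\begin{verbatim}
import numpy as np

A = np.array([[1, 0, 2, 3, 4],
              [0, 1, 0, -2, -3],
              [0, 0, 1, 0, 2],
              [0, 0, 0, 1, -1],
              [0, 0, 0, 0, 1]], dtype=float)
# Spalten von P: v_1^1, v_1^2, v_1^3, v_2^1, v_2^2 (wie oben)
P = np.array([[1, 4, 0, 3, 0],
              [2, -3, 0, -2, 0],
              [0, 2, 0, 0, 0],
              [0, -1, 0, 0, 1],
              [0, 0, 1, 0, 0]], dtype=float)
J = np.linalg.inv(P) @ A @ P
print(np.round(J).astype(int))   # Jordanmatrix mit Blöcken der Größen 3 und 2
\end{verbatim}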
|
2022-04-28 10:33:22 +02:00
|
|
|
|
|
|
|
\chapter{Euklidische und Unitäre Vektorräume}
|
2022-05-01 23:06:12 +02:00
|
|
|
\subsubsection{Motivation}
|
|
|
|
Wir wollen Geometrie betreiben und Längen beziehungsweise Winkel messen können.
|
2022-04-28 18:22:39 +02:00
|
|
|
\subsubsection{Länge}
|
|
|
|
\begin{tikzpicture}[scale=4]
|
2022-04-28 23:58:50 +02:00
|
|
|
\draw [-latex, very thick] (0, 0) -- (1.3, 1);
|
|
|
|
\draw [dashed] (0, 0) -- (1.3, 0) -- (1.3, 1);
|
|
|
|
\node [below] at (0.65, 0) {$x_2 - x_1$};
|
|
|
|
\node [right] at (1.3, 0.5) {$y_2 - y_1$};
|
|
|
|
\node [below left] at (0, 0) {$(x_1, y_1)$};
|
|
|
|
\node [above right] at (1.3, 1) {$(x_2, y_2)$};
|
|
|
|
\draw (1.1, 0) -- (1.1, 0.2) -- (1.3, 0.2);
|
|
|
|
\draw [fill] (0, 0) circle [radius=0.02];
|
2022-04-28 18:22:39 +02:00
|
|
|
\end{tikzpicture}
|
|
|
|
|
2023-03-28 11:46:57 +02:00
|
|
|
\( \R^2\colon P_1 = (x_1, y_1), P_2 = (x_2, y_2) \) \\
|
2022-06-13 10:53:40 +02:00
|
|
|
\( d(P_1, P_2) = \sqrt{(x_2 - x_1)^2 + (y_2 - y_1)^2} = \abs{ \vect{P_1P_2} } \)
|
2022-04-28 18:22:39 +02:00
|
|
|
|
|
|
|
\subsubsection{Winkel}
|
|
|
|
\begin{tikzpicture}[scale=0.7]
|
2022-04-28 23:58:50 +02:00
|
|
|
\coordinate (a) at (0, 0);
|
|
|
|
\coordinate (b) at (5, 6);
|
|
|
|
\coordinate (c) at (8, 4);
|
|
|
|
\draw [fill] (a) circle [radius=0.07];
|
|
|
|
\draw [very thick, ->] (a) -- (b);
|
|
|
|
\draw [very thick, ->] (a) -- (c);
|
|
|
|
\node [below left] at (a) {$p$};
|
|
|
|
\node [right] at (b) {$v_1$};
|
|
|
|
\node [right] at (c) {$v_2$};
|
|
|
|
\draw pic [draw, thick, angle radius=3cm, pic text=$\alpha$] {angle=c--a--b};
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{tikzpicture}
|
|
|
|
\\
|
2022-04-28 18:22:39 +02:00
|
|
|
$v_1 = (u_1, w_1), v_2 = (u_2, w_2), v= (u, w)$ \\
|
2022-06-13 10:53:40 +02:00
|
|
|
$\cos(\alpha) = \dfrac{u_1 u_2 + w_1 w_2}{\abs{v_1} \abs{v_2}}$ \\
|
|
|
|
$\abs{ v } = \sqrt{u^2 + w^2}$ \\
|
2022-04-28 18:22:39 +02:00
|
|
|
$v_1 \cdot v_2 = u_1 u_2 + w_1 w_2$ skalares Produkt \\
|
2022-04-28 23:58:50 +02:00
|
|
|
$\implies d(P_1, P_2) = \sqrt{\vect{P_1 P_2} \cdot \vect{P_1 P_2}}, \cos(\sphericalangle{v_1 v_2}) =
|
2022-06-13 10:53:40 +02:00
|
|
|
\dfrac{v_1 v_2}{\abs{v_1}\abs{v_2}}$
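Zum Beispiel: für $P_1 = (1, 1), P_2 = (4, 5)$ ist $d(P_1, P_2) = \sqrt{3^2 + 4^2} = 5$,
und für $v_1 = (1, 0), v_2 = (1, 1)$ ist
$\cos(\sphericalangle{v_1 v_2}) = \frac{1 \cdot 1 + 0 \cdot 1}{1 \cdot \sqrt 2} = \frac{1}{\sqrt 2}$,
also $\sphericalangle{v_1 v_2} = \frac{\pi}{4}$.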
|
2022-04-28 10:33:22 +02:00
|
|
|
|
|
|
|
\section[Skalarprodukte und Hermitesche Formen]{Skalarprodukte und Hermitesche \\ Formen}
|
|
|
|
Zunächst sei \( \K = \R \)
|
|
|
|
|
|
|
|
\begin{defin}
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $V$ ein $\R$-Vektorraum und $\beta\colon V \times V \to \R$.
|
2022-04-28 10:33:22 +02:00
|
|
|
$\beta$ heißt
|
|
|
|
\begin{itemize}
|
2022-05-01 23:06:12 +02:00
|
|
|
\item \underline{bilinear} (Bilinearform) wenn $\forall u, v, w\in V, \lambda \in \R$:
|
2022-04-28 10:33:22 +02:00
|
|
|
\begin{align*}
|
|
|
|
\beta(u+v, w) = \beta(u, w) + \beta(v, w), \\
|
|
|
|
\beta(u, v+w)=\beta(u, v) + \beta(u, w), \\
|
|
|
|
\beta(\lambda u, v) = \lambda \beta(u, v) = \beta(u, \lambda v)
|
|
|
|
\end{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
\item \underline{symmetrisch} wenn $\forall u, v \in V\colon \beta(u, v) = \beta(v, u)$
|
|
|
|
\item \underline{positiv definit} wenn $\forall v \in V\setminus\{0\}\colon \beta(v, v) > 0$
|
2022-04-28 10:33:22 +02:00
|
|
|
\item \underline{skalares Produkt} wenn $\beta$ symmetrisch, positiv definit (spd) und bilinear ist.
|
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
$v=0\in V \implies 0 \cdot v = v \implies \beta(v, v) = \beta(0 \cdot v, v) = 0 \cdot \beta(v, v) = 0$
|
|
|
|
|
|
|
|
\subsubsection{Beispiele}
|
|
|
|
\begin{itemize}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $V = \R^n$ und $v= (v_1, \dots, v_n), w= (w_1, \dots, w_n)$ \\ $\beta_1(v,
|
|
|
|
w) = \sum\limits_{i=1}^n v_i w_i= v^Tw$ ist symmetrische positiv definite
|
|
|
|
Bilinearform.
|
2023-03-28 11:46:57 +02:00
|
|
|
\item Sei $\dim(V) = n$ und $B=(b_1, \dots, b_n)$ Basis \\ Sei für $v, w \in V\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases} {}_B \Phi(v) = (v_1, \dots, v_n) \\{}_B \Phi(w) = (w_1, \dots, w_n)
|
|
|
|
\end{cases}
|
|
|
|
$ \\
|
2022-05-01 23:06:12 +02:00
|
|
|
$\beta_2(v, w) = \sum\limits_{i=1}^n v_i w_i = \beta_1({}_B \Phi(v), {}_B \Phi(w))$ ist spd.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item $V= \R^2, v=
|
|
|
|
\begin{pmatrix}
|
|
|
|
v_1 \\
|
|
|
|
v_2
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
w=
|
|
|
|
\begin{pmatrix}
|
|
|
|
w_1 \\
|
|
|
|
w_2
|
|
|
|
\end{pmatrix}
|
|
|
|
, A =
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 & -2 \\
|
|
|
|
-2 & 3
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
2022-04-28 10:33:22 +02:00
|
|
|
$\beta_3(v, w) = v^T A w \in \R$ \\
|
|
|
|
symmetrisch, weil
|
|
|
|
\[
|
|
|
|
\beta_3(v, w) = \beta_3(v, w)^T = (v^T A w)^T = w^T A^T v = w^T A v = \beta_3(w, v) \checkmark
|
|
|
|
\]
|
2022-05-07 23:36:49 +02:00
|
|
|
$\beta_3(v, w) = 4v_1w_1 - 2v_1w_2 - 2v_2w_1 + 3v_2 w_2$, also
$\beta_3(v, v) = (2 v_1 - v_2)^2 + 2 v_2^2$. Ist $\beta_3(v, v) = 0$, so folgt
$v_2 = 0 \implies (2v_1)^2 = 0 \implies v_1 = 0$, also ist $\beta_3$ positiv definit.
|
2023-03-28 11:46:57 +02:00
|
|
|
\item Sei $a, b \in \R, a < b, V = \{f\colon[a, b] \to \R\colon f \text{ stetig}\}$
|
|
|
|
\\ Sei $h \in V\colon h(t) > 0 \forall t \in [a, b]$
|
2022-04-28 10:33:22 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \beta_4(f, g) = \int_a^b f(t) g(t) h(t) dt \text{ bilinear, symmetrisch} \\
|
2022-06-13 10:53:40 +02:00
|
|
|
& \beta_4(f, f) = \int_a^b \abs{ f(t) } ^2 h(t) dt = 0 \implies f= 0
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
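Die quadratische Ergänzung im dritten Beispiel lässt sich leicht numerisch
gegenprüfen. Eine kleine Python-Skizze (unter der Annahme, dass NumPy verfügbar
ist; Bezeichnungen frei gewählt):
\begin{verbatim}
import numpy as np

A = np.array([[4., -2.], [-2., 3.]])
beta3 = lambda v, w: v @ A @ w    # beta_3(v, w) = v^T A w

rng = np.random.default_rng(0)
for _ in range(5):
    v = rng.normal(size=2)
    # Vergleich mit (2 v_1 - v_2)^2 + 2 v_2^2
    print(np.isclose(beta3(v, v), (2*v[0] - v[1])**2 + 2*v[1]**2))
\end{verbatim}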
|
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Ein Vektorraum mit skalarem Produkt heißt \underline{Euklidischer Raum}.\\
|
2022-05-04 13:44:26 +02:00
|
|
|
Man schreibt oft $u \cdot v, \inner uv$ anstatt $\beta(u, v)$.
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
Nun sei $\K = \C$
|
|
|
|
|
|
|
|
\begin{defin}
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $V$ ein \C-Vektorraum und $\beta\colon V \times V \to \C$. $\beta$ heißt \underline{hermitesche Form} wenn für
|
2022-04-28 10:33:22 +02:00
|
|
|
alle $u, v, w \in V, \lambda \in \C$:
|
|
|
|
\begin{enumerate}[label=\roman*)]
|
|
|
|
\item $\beta(u + v, w) = \beta(u, w) + \beta(v, w)$
|
|
|
|
\item $\beta(\lambda u, v) = \lambda \beta(u, v)$
|
2022-06-28 20:39:01 +02:00
|
|
|
\item $\beta(u, v) = \overline{\beta(v, u)}$
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-04 13:44:26 +02:00
|
|
|
\label{theo:3.1.4}
|
2022-04-28 10:33:22 +02:00
|
|
|
Sei $\beta$ hermitesche Form
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\beta(u, v + w) = \beta(u, v) + \beta(u, w)$
|
|
|
|
\item $\beta(u, \lambda v) = \overline{\lambda} \beta(u, v)$
|
|
|
|
\item $\beta(u, u) \in \R$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\beta(u, v+w) \overset{\text{iii}}{=} \overline{\beta(v+ w, u)} \overset{\text{i}}{=}
|
|
|
|
\overline{\beta(v, u)} + \overline{\beta(w, u)} \overset{\text{iii}}{=}\beta(u, v) + \beta(u, w) \checkmark$
|
|
|
|
\item $\beta(u, \lambda v) \overset{\text{iii}}{=} \overline{\beta(\lambda v, u)} \overset{\text{ii}}{=}
|
|
|
|
\overline{\lambda}\cdot\overline{\beta(v, u)} \overset{\text{iii}}{=} \overline{\lambda} \beta(u, v)$
|
|
|
|
\item $z = \overline{z} \iff z \in \R$ \\
|
|
|
|
$\beta(u, u) \overset{\text{iii}}{=} \overline{\beta(u, u)} \implies \beta(u, u) \in \R$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-04-28 10:33:22 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Sei $\beta$ hermitesche Form.
|
|
|
|
\begin{itemize}
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\beta$ heißt \underline{positiv definit} wenn $\forall v \in V\setminus\{0\}\colon
|
2022-04-28 10:33:22 +02:00
|
|
|
\underbrace{\beta(v, v)}_{\in\R} > 0$
|
|
|
|
\item Eine positiv definite hermitesche Form heißt \underline{skalares Produkt}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Ein komplexer Vektorraum mit einem skalaren Produkt heißt \underline{unitärer
|
|
|
|
Raum}.
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
|
|
|
$V = \C^n, u = (u_1, \dots, u_n), v = (v_1, \dots, v_n)$ \\
|
|
|
|
$u \cdot v = \sum\limits_{i=1}^n u_i \overline{v_i}$ ist skalares Produkt
|
|
|
|
\par
|
2023-01-31 13:30:38 +01:00
|
|
|
Wir zeigen nun, dass jeder euklidische Vektorraum in einen unitären Vektorraum
|
|
|
|
eingebettet werden kann. \newpage
|
2022-04-28 10:33:22 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Sei $V$ ein \R-Vektorraum.
|
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
& V_\C := \{ (u, v)\colon u, v\in V \} \text{
|
2023-01-31 13:30:38 +01:00
|
|
|
[Schreibe $(u, v) =: u + \overset{\mathclap{\substack{i^2 = -1 \\
|
|
|
|
|}}}{i} \cdot v$]} \\
|
2022-04-28 10:33:22 +02:00
|
|
|
& (u_1, v_1) + (u_2, v_2) := (u_1 + u_2, v_1 + v_2) \text{ Addition} \\
|
|
|
|
& \lambda = (\gamma + i \delta) \in \C, \lambda \cdot (u, v) = (\gamma u - \delta v, \delta u + \gamma v)
|
|
|
|
\text{ skalare Multiplikation} \\
|
|
|
|
& \lambda(u + iv) = (\gamma + i \delta) (u + iv) = \gamma u + i \gamma v + i \delta u - \delta v \\
|
|
|
|
& \; \; =(\gamma u - \delta v) + i (\gamma v + \delta u) \\
|
|
|
|
& \implies (V_\C, +, \cdot) \text{ ist \C-Vektorraum}
|
|
|
|
\end{align*}
|
2022-05-10 23:29:08 +02:00
|
|
|
$V_\C$ heißt die \underline{komplexe Erweiterung} von V.
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\text{Einbettung: }\iota\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
V \to V_\C \\
|
|
|
|
v \mapsto (v, 0) = v + i\cdot 0
|
|
|
|
\end{cases}
|
2022-04-28 10:33:22 +02:00
|
|
|
\]
|
|
|
|
|
|
|
|
\begin{lemma}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $V$ ist durch die Einbettung $v \overset{\iota_V}{\mapsto} (v, 0)$ \dq in $V_\C$ enthalten\dq, das heißt
|
2022-05-01 23:06:12 +02:00
|
|
|
$\iota_V$ ist injektiv.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Seien $V, W$ \R-Vektorräume, $\alpha \in \Hom_\R(V, W)$. Dann existiert eine
|
|
|
|
eindeutige komplexe Erweiterung $\alpha_\C \in \Hom_\C(V_\C, W_\C)$ mit
|
2022-05-01 23:06:12 +02:00
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall v \in V\colon \alpha_\C(\iota_V(v)) = \iota_W(\alpha(v))
|
2022-05-01 23:06:12 +02:00
|
|
|
\]
|
2022-04-28 10:33:22 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\iota_V$ ist linear \checkmark \\
|
|
|
|
$\iota_V(v) = (0, 0) \implies v = 0$ (injektiv)
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $\alpha_\C$ so eine Fortsetzung \\ $\alpha_\C(u + iv) = \alpha_\C(u) + i
|
|
|
|
\alpha_\C(v) = \alpha(u) + i\alpha(v)$ \\ $\alpha_\C((u, v)) = (\alpha(u),
|
|
|
|
\alpha(v))$ Dadurch ist $\alpha_\C$ eindeutig bestimmt!
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-04-28 10:33:22 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
$\alpha_\C$ heißt die komplexe Fortsetzung von $\alpha$.
|
|
|
|
\end{defin}
|
|
|
|
|
2022-05-01 23:06:12 +02:00
|
|
|
Auch skalare Produkte können eindeutig fortgesetzt werden.
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $(V, \beta)$ euklidischer \R-Vektorraum. Dann existiert genau eine
|
|
|
|
hermitesche Form $\beta_\C$ auf $V_\C$, welche $\beta$ fortsetzt:
|
2022-05-01 23:06:12 +02:00
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall v, w \in V\colon \beta_\C(\iota_V(v), \iota_V(w)) = \beta(v, w)
|
2022-05-01 23:06:12 +02:00
|
|
|
\]
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Ein solches $\beta_\C$ muss erfüllen, dass
|
|
|
|
\begin{align*}
|
|
|
|
\beta_\C(u_1 + i v_1, u_2 + i v_2) & = \beta_\C(u_1, u_2 + i v_2) + i \beta_\C(v_1, u_2+iv_2) \\
|
|
|
|
& = \beta_\C(u_1, u_2) + i \beta_\C(v_1, u_2)
|
2023-01-31 13:30:38 +01:00
|
|
|
\underset{\mathclap{\substack{| \\
|
|
|
|
\text{\ref{theo:3.1.4} b)}}}}{-}
|
2022-06-08 23:25:28 +02:00
|
|
|
i \beta_\C(u_1, v_2) + \beta_\C(v_1, v_2) \\
|
|
|
|
& = \beta(u_1, u_2) + \beta(v_1, v_2) + i(\beta(v_1, u_2) - \beta(u_1, v_2))
|
|
|
|
\end{align*}
|
|
|
|
und dadurch ist $\beta_\C$ eindeutig bestimmt. Umgekehrt prüft man direkt nach, dass
durch diese Formel tatsächlich eine hermitesche Form auf $V_\C$ definiert wird, die $\beta$ fortsetzt.
|
|
|
|
\end{proof}
|
2022-05-01 23:06:12 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Cauchy-Schwarz]
|
2022-05-01 23:06:12 +02:00
|
|
|
\label{theo:3.1.10}
|
|
|
|
Für $u, v$ in einem euklidischen/unitären Vektorraum $V$ gilt
|
|
|
|
\[
|
2022-06-28 20:39:01 +02:00
|
|
|
\abs{ \inner uv } ^2 \le \inner uu \cdot \inner vv
|
2022-05-01 23:06:12 +02:00
|
|
|
\]
|
|
|
|
Gleichheit gilt genau wenn $u, v$ linear abhängig sind.
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
$v=0 \checkmark$ \\
|
|
|
|
$v \neq 0 \implies \inner vv >0$ \\
|
|
|
|
Sei $\lambda \in \C \implies$
|
|
|
|
\begin{align*}
|
|
|
|
0 & \le \inner{u - \lambda v}{ u - \lambda v } \\
|
|
|
|
& = \inner u {u - \lambda v} - \lambda \inner v {u-\lambda v} \\
|
|
|
|
& = \inner uu - \overline{\lambda} \inner uv - \lambda \inner vu +
|
2022-06-13 10:53:40 +02:00
|
|
|
\underbrace{\lambda \overline{\lambda}}_{=\abs{\lambda}^2} \inner vv
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
Sei $\lambda := \dfrac{\inner uv}{\inner vv}, \overline{\lambda} =
|
|
|
|
\dfrac{\overline{\inner uv }}{\overline{\inner vv }}
|
|
|
|
=\dfrac{\inner vu}{\inner vv}$, so folgt
|
|
|
|
\begin{align*}
|
|
|
|
0 & \le \inner uu - \frac{\inner vu \inner uv }{\inner vv }
|
|
|
|
- \frac{\inner uv \inner vu }{\inner vv } +
|
|
|
|
\frac{\cancel{\inner vv } \inner uv \inner vu }
|
2022-06-13 10:53:40 +02:00
|
|
|
{\inner{v}{v}^{\cancel{2}}} \\
|
2022-06-13 10:52:46 +02:00
|
|
|
& = \inner uu - \frac{\abs{ \inner uv }^2}{\inner vv } \\
|
|
|
|
& \implies 0 \le \inner uu \inner vv - \abs{ \inner uv } ^2 \\
|
|
|
|
& \implies \inner uu \inner vv \ge \abs{ \inner uv }^2.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
Gleichheit gilt, wenn $\inner{u - \lambda v}{u - \lambda v} = 0$, also $u, v$ linear abhängig.
|
|
|
|
\end{proof}
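Die Ungleichung lässt sich für konkrete Vektoren leicht numerisch testen. Eine
kleine Python-Skizze (unter der Annahme, dass NumPy verfügbar ist; Namen frei
gewählt) prüft sie für zufällige komplexe Vektoren sowie die Gleichheit im
linear abhängigen Fall:
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(1)
u = rng.normal(size=4) + 1j * rng.normal(size=4)
v = rng.normal(size=4) + 1j * rng.normal(size=4)

inner = lambda x, y: np.vdot(y, x)    # <x, y> = Summe x_i * konj(y_i)
print(abs(inner(u, v))**2 <= inner(u, u).real * inner(v, v).real)    # True
print(np.isclose(abs(inner(u, 2j*u))**2,
                 inner(u, u).real * inner(2j*u, 2j*u).real))         # Gleichheit
\end{verbatim}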
|
2022-05-01 23:06:12 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Man nennt
|
|
|
|
\begin{itemize}
|
2022-05-04 14:14:57 +02:00
|
|
|
\item $\norm v := \sqrt{\inner vv }$ die \underline{Länge} oder die \underline{Norm} von
|
2022-05-01 23:06:12 +02:00
|
|
|
$v \in V$.
|
2022-05-04 14:14:57 +02:00
|
|
|
\item $\cos(\sphericalangle v w) := \dfrac{\inner vw }{\norm v \norm w }$ der
|
2022-05-04 13:44:26 +02:00
|
|
|
Kosinus des \underline{Winkels} zwischen $v, w \in V$. \\
|
|
|
|
(Wegen Satz \ref{theo:3.1.10} ist $\abs{\cos(\sphericalangle v w)} \le 1$ und damit auch $\sphericalangle v w$
|
|
|
|
wohldefiniert!)
|
2022-05-04 14:14:57 +02:00
|
|
|
\item $v\in V$ heißt \underline{normiert} wenn $\norm v = 1$
|
2022-05-01 23:06:12 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
2022-05-04 13:44:26 +02:00
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-04 14:14:57 +02:00
|
|
|
$\norm \cdot $ ist eine \underline{Norm}, das heißt
|
2022-05-04 13:44:26 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-05-04 14:14:57 +02:00
|
|
|
\item $\norm v \ge 0$
|
|
|
|
\item $\norm v = 0 \implies v = 0$
|
2022-06-13 10:53:40 +02:00
|
|
|
\item $\norm {\lambda v} = \abs{ \lambda } \norm v $
|
2022-05-04 14:14:57 +02:00
|
|
|
\item $\norm {v + w} \le \norm v + \norm w $ (Dreiecksungleichung)
|
2022-05-04 13:44:26 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\norm v = (\underbrace{\inner vv }_{\in [0, \infty)})^\frac12 \ge 0$
|
|
|
|
\item $\norm v = 0 \implies \norm{v}^2 = 0 \implies \inner vv = 0 \implies v = 0$
|
|
|
|
\item $\norm{\lambda v}^2 = \inner{\lambda v}{\lambda v}= \lambda \overline{\lambda}
|
2022-06-13 10:53:40 +02:00
|
|
|
\inner vv = \abs{ \lambda }^2 \norm{v}^2$
|
2023-01-31 13:30:38 +01:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
2022-06-08 23:25:28 +02:00
|
|
|
\norm{u + v}^2 & = \inner{u + v}{u +v} \\
|
|
|
|
& = \inner u{u+v} + \inner v{u+v} = \inner uu + \inner uv +
|
|
|
|
\inner vu + \inner vv \\
|
|
|
|
& = \inner uu + \inner uv + \overline{\inner uv } + \inner vv \\
|
2022-06-22 10:05:40 +02:00
|
|
|
& = \inner uu + 2 \real(\inner uv ) + \inner vv \\
|
2022-06-13 10:53:40 +02:00
|
|
|
& \le \inner uu + 2 \abs{ \inner uv } + \inner vv \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \le \inner uu + 2 \norm u \norm v + \inner vv \\
|
|
|
|
& = \norm{u}^2 + 2 \norm u \norm v + \norm{v}^2
|
|
|
|
= (\norm u + \norm v )^2
|
|
|
|
\end{align*}
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-05-04 13:44:26 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $V = (v_1, \dots, v_k)$ mit $\forall i \in [k]\colon v_i \neq 0$.
|
2022-05-04 13:44:26 +02:00
|
|
|
\begin{itemize}
|
2022-05-04 14:28:04 +02:00
|
|
|
\item $v, w$ heißen \underline{orthogonal}, wenn $\inner vw = 0$ [schreibe auch $v \bot w$]
|
|
|
|
\item $V$ heißt \underline{Orthogonalsystem} (OS), wenn
|
2023-03-28 11:46:57 +02:00
|
|
|
$\forall i, j \in [k], i\neq j\colon v_i \bot v_j$
|
2022-05-04 14:28:04 +02:00
|
|
|
\item $V$ heißt \underline{Orthonormalsystem} (ONS), wenn $V$ ein Orthogonalsystem ist
|
2023-03-28 11:46:57 +02:00
|
|
|
und $\forall i \in [k]\colon \norm{v_i}= 1$
|
2022-05-04 14:28:04 +02:00
|
|
|
\item $V$ heißt \underline{Orthogonalbasis} (OB), wenn $V$ ein Orthogonalsystem und eine Basis ist.
|
|
|
|
\item $V$ heißt \underline{Orthonormalbasis} (ONB), wenn $V$ ein Orthonormalsystem und eine Basis ist.
|
2022-05-04 13:44:26 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $(v_1, \dots, v_k)$ ein Orthogonalsystem. Dann ist $(v_1, \dots, v_k)$
|
|
|
|
linear \\unabhängig.
|
2022-05-04 13:44:26 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Angenommen $\lambda_1 v_1 + \dots + \lambda_k v_k = 0$
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall i \in [k]\colon 0 = \lambda_1 \underbrace{\inner {v_1}{v_i}}_{=0} + \dots +
|
2022-06-08 23:25:28 +02:00
|
|
|
\lambda_i \underbrace{\inner {v_i}{v_i}}_{\neq0} + \dots +
|
|
|
|
\lambda_k \underbrace{\inner {v_k}{v_i}}_{=0}
|
|
|
|
= \lambda_i \underbrace{\norm{v_i}^2}_{\neq 0}
|
|
|
|
\]
|
|
|
|
$\implies \lambda_i = 0$
|
|
|
|
\end{proof}
|
2022-05-04 13:44:26 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-19 09:42:55 +02:00
|
|
|
\label{theo:3.1.15}
|
2022-05-04 13:44:26 +02:00
|
|
|
Sei $B=(b_1, \dots, b_n)$ Orthonormalbasis von $V, n\in \mathbb{N}\cup \{\infty\}$.
|
2022-05-05 09:41:32 +02:00
|
|
|
Dann gilt für alle $v, w \in V$ und $(\lambda_1, \dots, \lambda_n) = {}_B \Phi(v), (\mu_1, \dots, \mu_n)
|
2022-05-04 13:44:26 +02:00
|
|
|
= {}_B\Phi(w)$:
|
|
|
|
\[
|
|
|
|
\inner vw = \sum_{i=1}^n \lambda_i \overline{\mu_i}
|
|
|
|
\]
|
2022-05-04 14:14:57 +02:00
|
|
|
Weiters gilt $\lambda_i = \inner{v}{b_i}, b_i^*(v) = \inner v {b_i}$
|
2022-05-04 13:44:26 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{align*}
|
|
|
|
& \inner{b_i}{b_j} = \delta_{ij} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
&
|
|
|
|
\begin{rcases}
|
|
|
|
v = \sum_{i=1}^n \lambda_i b_i \\
|
|
|
|
w = \sum_{i=1}^n \mu_i b_i
|
|
|
|
\end{rcases}
|
|
|
|
\implies \inner vw
|
2022-06-08 23:25:28 +02:00
|
|
|
= \sum_{i, j = 1}^n \inner{\lambda_i b_i}{\mu_j b_j} = \sum_{i, j = 1}^n \lambda_i \overline{\mu_j}
|
|
|
|
\inner{b_i}{b_j} = \sum_{i=1}^n \lambda_i \overline{\mu_i} \\
|
|
|
|
& {}_B \Phi(b_i) = (0, \dots, \overset{i}{1}, \dots, 0) \implies \inner v{b_i} = \sum_{j=1}^n \lambda_j
|
|
|
|
\delta_{ij} = \lambda_i
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-05-04 13:44:26 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Gram-Schmidt Orthonormalisierungsverfahren]
|
2022-05-05 09:41:32 +02:00
|
|
|
\label{theo:3.1.16}
|
|
|
|
Sei $(a_1, a_2, \dots) \subseteq V$ linear unabhängig. Dann existiert genau ein Orthonormalsystem
|
|
|
|
$(b_1, b_2, \dots)$ mit
|
|
|
|
\begin{enumerate}[label=\roman*)]
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\forall k\colon \linspan{ a_1, \dots, a_k } = \linspan{b_1, \dots, b_k} =: U_k$
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Die Basistransformationsmatrix $M_k$ zwischen den Basen $(a_1, \dots, a_k)$ und
|
|
|
|
$(b_1, \dots, b_k)$ von $U_k$ hat positive Determinante.
|
2022-05-05 09:41:32 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
$b_1, b_2, \dots$ werden induktiv definiert.
|
|
|
|
\begin{itemize}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item $b_1 = \frac{a_1}{\norm{a_1}}, M_1 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\frac{1}{\norm{a_1}}
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
Eindeutigkeit: Sei $\tilde b_1$ mit i), ii) $\implies \tilde b_1 = c \cdot a_1, 1 = \norm{\tilde b_1}
|
2022-06-13 10:53:40 +02:00
|
|
|
= \norm{c \cdot a_1} = \abs{ c } \norm{a_1}$ \\
|
|
|
|
$ \implies \abs{ c } = \dfrac{1}{\norm{a_1}}$. Weiters ist $\tilde M_1 = (c)$ und wegen ii)
$\det(\tilde M_1) = c > 0$, also $c = \dfrac{1}{\norm{a_1}}$ und damit $\tilde b_1 = b_1$.
|
2022-06-08 23:25:28 +02:00
|
|
|
\item $(b_1, \dots, b_n)$ schon konstruiert mit i), ii) \\
|
|
|
|
Sei $c_{n+1} := a_{n+1} - \sum\limits_{j=1}^n \inner{a_{n+1}}{b_j} b_j$
|
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
& \forall i \in [n]\colon \inner{c_{n+1}}{b_i} = \inner{a_{n+1}}{b_i} -
|
2022-06-08 23:25:28 +02:00
|
|
|
\sum\limits_{j=1}^n \inner{a_{n+1}}{b_j} \underbrace{\inner{b_j}{b_i}}_{\delta_{ij}} \\
|
|
|
|
& = \inner{a_{n+1}}{b_i}
|
2022-06-13 11:29:12 +02:00
|
|
|
- \inner{a_{n+1}}{b_i} = 0 \implies c_{n+1} \bot \linspan{ b_1, \dots, b_n }
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
$b_{n+1} = \dfrac{c_{n+1}}{\norm{c_{n+1}}} \implies (b_1, \dots, b_{n+1})$ Orthonormalsystem mit \\
|
2022-06-13 11:29:12 +02:00
|
|
|
$\linspan{ b_1, \dots, b_n } = \linspan{a_1, \dots, a_n}$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
|
|
|
& b_1 = \mu_{11} a_1 \\
|
|
|
|
& b_2 = \mu_{21} a_1 + \mu_{22} a_2 \\
|
|
|
|
& b_3 = \mu_{31} a_1 + \mu_{32} a_2 + \mu_{33} a_3 \\
|
|
|
|
& \vdots \\
|
|
|
|
& b_n = \mu_{n1} a_1 + \dots + \mu_{nn} a_n \\
|
|
|
|
& b_{n+1} = \mu_{n+1 1} a_1 + \dots + \mu_{n+1 n} a_n + \dfrac{1}{\norm{c_{n+1}}} a_{n+1} \\
|
|
|
|
& \implies \det(\mu_{ij}) = \det(M_n) \cdot \dfrac{1}{\norm{c_{n+1}}} > 0
|
|
|
|
\end{align*}
|
|
|
|
Eindeutigkeit: Sei $\tilde b_{n+1}$ ein weiterer Vektor mit i), ii)
|
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
& \implies \tilde b_{n+1} = \mu_1 b_1 + \dots + \mu_n b_n + \mu b_{n+1} \\
|
|
|
|
& \forall i \in [n]\colon 0 = \inner{\tilde b_{n+1}}{b_i} = \mu_i \implies \tilde b_{n+1} = \mu b_{n+1} \\
|
|
|
|
& 1 = \norm{\tilde b_{n+1}} = \abs{\mu} \norm{b_{n+1}} = \abs{\mu} \implies \abs{\mu} = 1 \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \det(\tilde M_{n+1}) = \det(M_n) \cdot \mu > 0 \implies \mu = 1 \land \tilde b_{n+1} = b_{n+1}
|
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-05-05 09:41:32 +02:00
|
|
|
|
2022-05-05 11:26:11 +02:00
|
|
|
\begingroup
|
|
|
|
\allowdisplaybreaks
|
|
|
|
|
|
|
|
\subsubsection{Veranschaulichung im $\R^2$}
|
2022-06-10 00:29:02 +02:00
|
|
|
\begin{tikzpicture}[scale=3.79]
|
2022-05-05 11:26:11 +02:00
|
|
|
\tikzmath{
|
|
|
|
\a1 = 3;
|
|
|
|
\a2 = 1;
|
|
|
|
\a3 = 2;
|
|
|
|
\a4 = 2;
|
2022-05-05 12:20:05 +02:00
|
|
|
\norma1 = sqrt((\a1 * \a1 + \a2 * \a2));
|
2022-05-05 11:26:11 +02:00
|
|
|
\normeda1 = \a1 / \norma1;
|
|
|
|
\normeda2 = \a2 / \norma1;
|
2022-05-05 12:20:05 +02:00
|
|
|
\innerprod = (\normeda1 * \a3) + (\normeda2 * \a4);
|
2022-05-05 11:26:11 +02:00
|
|
|
\c1 = \a3 - (\innerprod * \normeda1);
|
|
|
|
\c2 = \a4 - (\innerprod * \normeda2);
|
|
|
|
\t1 = (\innerprod * \normeda1);
|
|
|
|
\t2 = (\innerprod * \normeda2);
|
2022-05-05 12:20:05 +02:00
|
|
|
\normc = sqrt((\c1 * \c1 + \c2 * \c2));
|
2022-05-05 11:26:11 +02:00
|
|
|
\b3 = \c1 / \normc;
|
|
|
|
\b4 = \c2 / \normc;
|
|
|
|
}
|
2022-05-05 12:26:34 +02:00
|
|
|
\draw [->] (0, 0) --node[above]{$a_1$} (\a1, \a2);
|
2022-05-05 11:26:11 +02:00
|
|
|
\draw [->] (0, 0) --node[above]{$a_2$} (\a3, \a4);
|
|
|
|
\draw [->, blue, thick] (0, 0) --node[below right]{$\inner{a_2}{b_1}b_1$} (\t1, \t2);
|
|
|
|
\draw [->, violet, very thick] (0, 0) --node[below right]{$\frac{a_1}{\norm{a_1}}=:b_1$} (\normeda1, \normeda2);
|
|
|
|
\draw [->, magenta, thick] (0, 0) --node[left]{$c_2:=a_2 - \inner{a_2}{b_1}b_1$} (\c1, \c2);
|
|
|
|
\draw [->, teal, very thick] (0, 0) --node[right]{$\frac{c_2}{\norm{c_2}}=:b_2$} (\b3, \b4);
|
|
|
|
\end{tikzpicture}
|
|
|
|
|
2022-05-05 09:41:32 +02:00
|
|
|
\subsubsection{Beispiel}
|
2023-01-31 13:30:38 +01:00
|
|
|
$V = \R^4, a_1 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 \\
|
|
|
|
2 \\
|
|
|
|
-2 \\
|
|
|
|
-1
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
a_2 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 \\
|
|
|
|
2 \\
|
|
|
|
-4 \\
|
|
|
|
-5
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
a_3 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
0 \\
|
|
|
|
8 \\
|
|
|
|
-2 \\
|
|
|
|
-5
|
|
|
|
\end{pmatrix}
|
|
|
|
$
|
2022-05-05 09:41:32 +02:00
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& b_1 = \frac{1}{\norm{a_1}} a_1 ,\; \norm{a_1} = (4^2 + 2^2 + 2^2 + 1^2)^{\frac 12} = \sqrt{25} = 5 \\
|
|
|
|
& = \frac 15
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 \\
|
|
|
|
2 \\
|
|
|
|
-2 \\
|
|
|
|
-1
|
|
|
|
\end{pmatrix}
|
|
|
|
,\;
|
|
|
|
\inner{a_2}{b_1} = \frac 15
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 \\
|
|
|
|
2 \\
|
|
|
|
-4 \\
|
|
|
|
-5
|
|
|
|
\end{pmatrix}
|
|
|
|
\cdot
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 \\
|
|
|
|
2 \\
|
|
|
|
-2 \\
|
|
|
|
-1
|
|
|
|
\end{pmatrix}
|
|
|
|
= \frac 15 (8 + 4 + 8 + 5) = \frac{25}5 = 5 \\
|
|
|
|
& c_2 = a_2 - \underbrace{\inner{a_2}{b_1}}_5 b_1 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 \\
|
|
|
|
2 \\
|
|
|
|
-4 \\
|
|
|
|
-5
|
|
|
|
\end{pmatrix}
|
|
|
|
-
|
|
|
|
\begin{pmatrix}
|
|
|
|
4 \\
|
|
|
|
2 \\
|
|
|
|
-2 \\
|
|
|
|
-1
|
|
|
|
\end{pmatrix}
|
|
|
|
=
|
|
|
|
\begin{pmatrix}
|
|
|
|
-2 \\
|
|
|
|
0 \\
|
|
|
|
-2 \\
|
|
|
|
-4
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
|
|
|
& \norm{c_2} = (4 + 4 + 16)^{\frac 12} = \sqrt{24} \\
|
|
|
|
& \implies b_2 = \frac{1}{\sqrt{24}}
|
|
|
|
\begin{pmatrix}
|
|
|
|
-2 \\
|
|
|
|
0 \\
|
|
|
|
-2 \\
|
|
|
|
-4
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
2022-05-16 16:44:37 +02:00
|
|
|
& c_3 = a_3 - \inner{a_3}{b_1} b_1 - \inner{a_3}{b_2} b_2 = \dots =
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{pmatrix}
|
|
|
|
-2 \\
|
|
|
|
6 \\
|
|
|
|
2 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
|
|
|
\\
|
|
|
|
& \norm{c_3} = (4 + 36 + 4)^\frac 12 = \sqrt{44} \\
|
|
|
|
& \implies b_3 = \frac 1{\sqrt{44}}
|
|
|
|
\begin{pmatrix}
|
|
|
|
-2 \\
|
|
|
|
6 \\
|
|
|
|
2 \\
|
|
|
|
0
|
|
|
|
\end{pmatrix}
|
2022-05-05 09:41:32 +02:00
|
|
|
\end{align*}
|
|
|
|
|
2022-05-05 11:26:11 +02:00
|
|
|
\endgroup
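Das Verfahren aus Satz \ref{theo:3.1.16} lässt sich unmittelbar implementieren.
Die folgende Python-Skizze (unter der Annahme, dass NumPy verfügbar ist; nur zur
Illustration) reproduziert die Vektoren $b_1, b_2, b_3$ des Beispiels:
\begin{verbatim}
import numpy as np

def gram_schmidt(vektoren):
    """Orthonormalisiert eine Liste linear unabhängiger Vektoren (Skizze)."""
    basis = []
    for a in vektoren:
        c = a - sum(np.dot(a, b) * b for b in basis)   # c = a - Summe <a, b_j> b_j
        basis.append(c / np.linalg.norm(c))            # b = c / ||c||
    return basis

a1 = np.array([4., 2., -2., -1.])
a2 = np.array([2., 2., -4., -5.])
a3 = np.array([0., 8., -2., -5.])
for b in gram_schmidt([a1, a2, a3]):
    print(b)
\end{verbatim}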
|
|
|
|
|
2022-05-05 09:41:32 +02:00
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.1.17}
|
2022-05-12 09:32:32 +02:00
|
|
|
Sei $V$ ein euklidischer/unitärer Vektorraum mit höchstens abzählbarer Dimension.
|
2022-05-11 11:47:48 +02:00
|
|
|
Dann kann jedes Orthonormalsystem zu einer Orthonormalbasis von $V$ ergänzt werden.
|
2022-05-05 09:41:32 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-28 20:39:01 +02:00
|
|
|
Sei $(b_1, \dots, b_k)$ ein Orthonormalsystem, $(b_1, \dots, b_k, a_{k+1}, \dots)$ eine Basis. \\
|
|
|
|
Satz~\ref{theo:3.1.16} $\implies \exists b_{k+1}, b_{k+2}, \dots$ mit $(b_1, \dots, b_k, b_{k+1}, \dots)$
|
2022-06-08 23:25:28 +02:00
|
|
|
Orthonormalbasis.
|
|
|
|
\end{proof}
|
2022-05-05 09:41:32 +02:00
|
|
|
|
2022-05-11 11:47:48 +02:00
|
|
|
\begin{defin}
|
|
|
|
\begin{itemize}
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $M, N \subseteq V$ heißen \underline{orthogonal} wenn $\forall v \in M, w \in N\colon
|
2022-05-11 11:47:48 +02:00
|
|
|
\underset{\inner vw = 0}{v \bot w}$ \\
|
|
|
|
Wir schreiben $M \bot N$ \\
|
|
|
|
$[M = \{v\} \implies v \bot N]$
|
|
|
|
\item Für $M \subseteq V$ heißt
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
M^\bot := \{ v\in V\colon v \bot M \} = \{ v \in V\colon \forall w \in M\colon \inner vw = 0 \}
|
2022-05-11 11:47:48 +02:00
|
|
|
\]
|
|
|
|
\underline{orthogonales Komplement} von M
|
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
2022-06-28 20:39:01 +02:00
|
|
|
$M^\bot$ ist immer Unterraum von $V$. Selbst, wenn $M$ kein Unterraum ist.
|
2022-05-11 11:47:48 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
|
|
|
Sei $U$ $r$-dimensionaler Unterraum von $n$-dimensionalem euklidischen/\\ unitären Vektorraum $V$. Dann gilt:
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\dim(U^\bot) = n - r$
|
|
|
|
\item $(U^\bot)^{{}^\bot} = U$
|
|
|
|
\item $V = U \oplus U^\bot$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $(b_1, \dots, b_r)$ Orthonormalbasis von $U$.\\
|
|
|
|
$(b_1, \dots, b_r, b_{r+1}, \dots, b_n)$ Orthonormalbasis von $V$. \newline
|
|
|
|
[die existiert laut Satz \ref{theo:3.1.17}] \\
|
2022-06-13 11:29:12 +02:00
|
|
|
Behauptung: $U^\bot = \linspan{ b_{r+1}, \dots, b_n }$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
Beweis: $\subseteq$: Sei $v\in U^\bot, v = \sum_{i=1}^n \lambda_i b_i$
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall i \in [r]\colon 0 = \inner v{b_i} = \inner{\sum_{j=1}^n \lambda_j b_j}{b_i} =
|
2022-06-08 23:25:28 +02:00
|
|
|
\sum_{j=1}^n \lambda_j \underbrace{\inner{b_j}{b_i}}_{\delta_{ij}} = \lambda_i
|
|
|
|
\]
|
2023-03-28 11:46:57 +02:00
|
|
|
$\supseteq\colon v\in \linspan{ b_{r+1}, \dots, b_n } \overset{!}{\implies} v \in U^\bot$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
Dann ist $v = \sum_{j=r+1}^n \lambda_j b_j$; sei $u = \sum_{i=1}^r \mu_i b_i \in U$ beliebig \\
|
|
|
|
$\implies \inner vu = \sum_{j=r+1}^n \lambda_j \sum_{i=1}^r \overline{\mu_i} \underbrace{\inner{b_j}{b_i}}_{=0}=0$ \\
|
|
|
|
$\implies$ a)
|
2022-06-28 20:39:01 +02:00
|
|
|
\item \leavevmode
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$U \subseteq (U^\bot)^{{}^\bot}$:]
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $v \in U \implies \forall w \in U^\bot\colon \inner wv = 0 \implies v \in (U^\bot)^{{}^\bot}$
|
2022-06-28 20:39:01 +02:00
|
|
|
\item[$(U^\bot)^{{}^\bot} \subseteq U$:]
|
|
|
|
$\dim((U^\bot)^{{}^\bot}) \overset{\text{a)}}{=} n - \dim(U^\bot)
|
|
|
|
\overset{\text{a)}}{=} n - (n-r) = r = \dim(U)$
|
|
|
|
\end{itemize}
|
2022-06-08 23:25:28 +02:00
|
|
|
\item Sei $w\in U \cap U^\bot \implies \inner ww = 0 \implies w = 0$, also $U \cap U^\bot = \{0\}$.
Zusammen mit $\dim(U) + \dim(U^\bot) \overset{\text{a)}}{=} n$ folgt $V = U \oplus U^\bot$.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
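Numerisch lässt sich ein orthogonales Komplement etwa über die
Singulärwertzerlegung bestimmen. Eine kleine Python-Skizze (unter der Annahme,
dass NumPy verfügbar ist; die Beispieldaten sind frei gewählt), die zugleich
$\dim(U^\bot) = n - r$ illustriert:
\begin{verbatim}
import numpy as np

# U = span{(1,0,1), (0,1,1)} im R^3
B = np.array([[1., 0., 1.],
              [0., 1., 1.]])
r = np.linalg.matrix_rank(B)
_, _, Vt = np.linalg.svd(B)
U_perp = Vt[r:]    # Zeilen ab Index r: Orthonormalbasis des Komplements
print(U_perp)      # etwa (1, 1, -1)/sqrt(3), bis aufs Vorzeichen
\end{verbatim}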
|
2022-05-11 11:47:48 +02:00
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
2022-06-28 20:39:01 +02:00
|
|
|
$U = (U^\bot)^{{}^\bot}$ gilt im Allgemeinen nicht, wenn $\dim(V) = \infty$.
|
2022-05-11 11:47:48 +02:00
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
2023-03-28 11:46:57 +02:00
|
|
|
$V = \{ f\colon [0, 1] \to \R, \text{ stetig} \}, \inner fg = \int_0^1 f(t) g(t) dt$\\
|
|
|
|
$U = \{ p \in V\colon p \text{ ist Polynom}\}$\\
|
2022-05-11 11:47:48 +02:00
|
|
|
$(p_1, p_2, \dots)$ ist eine Orthonormalbasis von $U$.\\
|
2022-05-12 09:32:32 +02:00
|
|
|
Wir zeigen: $U^\bot = \{0\} \implies (U^\bot)^{{}^\bot} =V\neq U$
|
2022-06-11 12:51:34 +02:00
|
|
|
\begin{nonumbersatz}[Weierstraß]
|
2023-03-28 11:46:57 +02:00
|
|
|
$\forall f \in V, \varepsilon > 0 \; \exists p \in U\colon \norm{f-p}_\infty \le \varepsilon$\\
|
2022-05-12 09:32:32 +02:00
|
|
|
Beweis wird hier nicht geführt.
|
2022-06-11 12:51:34 +02:00
|
|
|
\end{nonumbersatz}
|
2022-05-11 11:47:48 +02:00
|
|
|
\par
|
|
|
|
Sei $f \in V \setminus \{0\}, a := \norm f^2 = \inner ff, b = \norm f _\infty$.
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $p \in U\colon \norm{f-p}_\infty < \frac a {2b}$ \\ Behauptung: $\inner fp
|
|
|
|
> 0$
|
2022-05-11 11:47:48 +02:00
|
|
|
\begin{align*}
|
|
|
|
\inner fp = \int_0^1 f(t)p(t) dt & = \int_0^1 f(t)[f(t) - (f(t) - p(t))]dt \\
& = \int_0^1 f(t)f(t)dt - \int_0^1 f(t)(f(t) - p(t)) dt \\
& = a - \int_0^1 f(t)[f(t)-p(t)] dt \\
\int_0^1 \underbrace{\abs{f(t)}}_{\le \norm{f}_\infty = b}
\cdot \underbrace{\abs{f(t) - p(t)}}_{\le \norm{f-p}_\infty < \frac a{2b}} dt
& < \int_0^1 b \cdot \frac a {2b} dt = \frac a2
\implies \inner fp > a - \frac a2 = \frac a2 > 0 \\
\forall f \in V \setminus \{0\} \; \exists p \in U\colon \int_0^1 f(t)p(t)dt
& \neq 0 \implies U^\bot = \{0\} \implies U \subsetneq (U^\bot)^{{}^\bot}
|
|
|
|
\end{align*}
|
|
|
|
|
|
|
|
\section[Adjungierte Abbildungen und normale Endomorphismen]
|
|
|
|
{Adjungierte Abbildungen und normale \\Endomorphismen}
|
|
|
|
\begin{defin}
|
|
|
|
Seien $V, W$ euklidische/unitäre Vektorräume, $\alpha \in \homk(V, W), \K \in \{\R, \C\}$. \\
|
|
|
|
$\alpha^* \in \homk(W, V)$ heißt \underline{zu $\alpha$ adjungiert}, falls
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall v \in V\colon \forall w \in W\colon \inner{\alpha(v)}{w}_W = \inner{v}{\alpha^*(w)}_V
|
2022-05-11 11:47:48 +02:00
|
|
|
\]
|
|
|
|
$V=W$: Gilt $\genfrac{}{}{0pt}{0}{\alpha = -\alpha^*}{\alpha = \alpha^*}$,
|
2022-05-23 22:16:44 +02:00
|
|
|
so heißt $\alpha$ $\genfrac{}{}{0pt}{0}{\text{\underline{anti-selbstadjungiert}}}
|
2022-05-11 11:47:48 +02:00
|
|
|
{\text{\underline{selbstadjungiert}}}$.
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
\begin{itemize}
|
|
|
|
\item $\alpha^*$ muss nicht existieren! \\
|
2022-05-12 09:32:32 +02:00
|
|
|
Beispiel: $U, V$ wie vorher. $\alpha \in \Hom_\R(U, V), \alpha(p) = p \forall p \in U$ \\
|
|
|
|
Angenommen $\exists \alpha^* \in \Hom_\R(V, U), e(t) = e^t \implies e \in V$ \\
|
2022-05-11 11:47:48 +02:00
|
|
|
$\alpha^* (e) = a_1 p_1 + \dots + a_m p_m$ \\
|
|
|
|
$f := e - (a_1 p_1 + \dots + a_m p_m) = e- \alpha^*(e) \neq 0$ \\
|
|
|
|
Behauptung: $f \in U^\bot (\implies f = 0$ \Lightning)
|
|
|
|
\item $i \in \{m+1, m+2, \dots \}$ %Eigentlich nicht so wirklich ein Punkt?!
|
|
|
|
\begin{align*}
|
|
|
|
& \inner{e}{p_i} = \inner{e}{\alpha(p_i)} = \inner{\alpha^*(e)}{p_i} =
|
|
|
|
\inner{a_1 p_1 + \dots + a_m p_m}{p_i} = 0 \\
|
2023-03-28 11:46:57 +02:00
|
|
|
& \implies \forall i = m+1, m+2, \dots\colon \inner{f}{p_i} = 0
|
2022-05-11 11:47:48 +02:00
|
|
|
\end{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
Außerdem: $i\in [m]\colon \inner{e}{p_i} = \inner{\alpha^*(e)}{p_i}$ \newline
|
2022-05-11 11:47:48 +02:00
|
|
|
$\implies \inner{f}{p_i} =
|
|
|
|
\inner{e - \alpha^*(e)}{p_i} = \inner{\alpha^*(e) - \alpha^*(e)}{p_i} = 0$ \\
|
|
|
|
$\implies \inner{f}{p_i} = 0 \; \forall i = 1, 2, \dots \implies f \in U^\bot \implies f = 0$ \Lightning
|
|
|
|
\item Wenn $\alpha^*$ existiert, dann ist $\alpha^*$ eindeutig.
|
|
|
|
\end{itemize}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $\alpha \in \homk(V, W), \dim(V) < \infty$. Dann existiert $\alpha^*$: \\
|
2022-05-12 09:32:32 +02:00
|
|
|
Mit $\{e_1, \dots, e_n\}$ Orthonormalbasis von $\underbrace{V}_{
|
2022-05-18 15:02:32 +02:00
|
|
|
\mathclap{\text{Existenz gegeben wegen Satz \ref{theo:3.1.17}}}}$ gilt:
|
2022-05-11 11:47:48 +02:00
|
|
|
\[
|
|
|
|
\alpha^*(w) := \sum_{i=1}^n \inner{w}{\alpha(e_i)} e_i
|
|
|
|
\]
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2023-03-28 11:46:57 +02:00
|
|
|
Zu Zeigen: $\forall v \in V, w \in W\colon \inner{\alpha(v)}{w} = \inner{v}{\alpha^*(w)}$.\\
|
2022-06-08 23:25:28 +02:00
|
|
|
O.B.d.A.: $v = e_j$ für $j \in [n]$
|
|
|
|
\begin{align*}
|
|
|
|
\inner{v}{\alpha^*(w)} = \inner{e_j}{\sum_{i=1}^n \inner{w}{\alpha(e_i)}e_i}
|
|
|
|
& = \sum_{i=1}^n \inner{e_j}{\inner{w}{\alpha(e_i)} e_i} \\
|
|
|
|
& = \sum_{i=1}^n \overline{\inner{w}{\alpha(e_i)}} \underbrace{\inner{e_j}{e_i}}_{\delta_{ij}} \\
|
|
|
|
& = \overline{\inner{w}{\alpha(e_j)}} = \inner{\alpha(e_j)}{w} = \inner{\alpha(v)}{w}
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-05-11 11:47:48 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
Sei $A \in \C^{m \times n}$.
|
2022-05-12 09:32:32 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \overline{A} := (\overline{a}_{ij})_{i,j} & & \text{ zu $A$ \underline{konjugiert komplexe} Matrix} \\
|
|
|
|
& A^* = (\overline{A})^T & & \text{ zu $A$ \underline{adjungierte} Matrix}
|
|
|
|
\end{align*}
|
2022-05-11 11:47:48 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-19 09:42:55 +02:00
|
|
|
\label{theo:3.2.4}
|
2022-05-11 11:47:48 +02:00
|
|
|
Sei $\alpha \in \homk(V, W), \K \in \{\R, \C\}, \dim(V), \dim(W) < \infty$
|
|
|
|
\[
|
|
|
|
\begin{rcases}
|
|
|
|
E = \{e_1, \dots, e_n\} \text{ ONB von V} \\
|
|
|
|
F = \{f_1, \dots, f_m\} \text{ ONB von W}
|
|
|
|
\end{rcases}
|
|
|
|
\implies {}_E M(\alpha^*)_F = ({}_F M(\alpha)_E)^*
|
|
|
|
\]
|
2022-05-12 09:32:32 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& A = {}_F M(\alpha)_E = (a_{ij})_{\substack{i=1,\dots,m \\
|
|
|
|
j=1,\dots,n}}, \,
|
|
|
|
B = {}_E M(\alpha^*)_F = (b_{ij})_{\substack{i=1,\dots,n \\
|
|
|
|
j=1,\dots,m}} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \alpha(e_j) = \sum_{i=1}^m a_{ij} f_i \\
|
|
|
|
& F \text{ ONB} \implies a_{ij} = \inner{\alpha(e_j)}{f_i} \\
|
|
|
|
& \alpha^*(f_j) = \sum_{i=1}^n b_{ij} e_i \implies b_{ij} = \inner{\alpha^*(f_j)}{e_i} \\
|
|
|
|
& \dots = \overline{\inner{e_i}{\alpha^*(f_j)}} = \overline{\inner{\alpha(e_i)}{f_j}} = \overline{a_{ji}} \\
|
|
|
|
& \implies B = A^*
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
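Für Matrizen bedeutet das: Adjungieren ist konjugiertes Transponieren. Eine
kleine Python-Skizze (unter der Annahme, dass NumPy verfügbar ist; Zufallsdaten
nur zur Illustration) prüft $\inner{Av}{w} = \inner{v}{A^*w}$ numerisch:
\begin{verbatim}
import numpy as np

rng = np.random.default_rng(0)
A = rng.normal(size=(3, 3)) + 1j * rng.normal(size=(3, 3))
v = rng.normal(size=3) + 1j * rng.normal(size=3)
w = rng.normal(size=3) + 1j * rng.normal(size=3)

inner = lambda x, y: np.vdot(y, x)    # <x, y> = Summe x_i * konj(y_i)
A_star = A.conj().T                   # adjungierte Matrix
print(np.isclose(inner(A @ v, w), inner(v, A_star @ w)))    # True
\end{verbatim}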
|
2022-05-12 09:32:32 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $(\alpha^*)^{{}^*} = \alpha$
|
|
|
|
\item $(\alpha + \beta)^* = \alpha^* + \beta^*$
|
|
|
|
\item $(\lambda \alpha)^* = \overline{\lambda}\alpha^*$
|
|
|
|
\item $(\beta \circ \alpha)^* = \alpha^* \circ \beta^*$
|
|
|
|
\item $\alpha \in \homkv, \dim(V) < \infty \implies \det(\alpha) = \overline{\det(\alpha^*)}$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\inner{\alpha(v)}{w} = \inner{v}{\alpha^*(w)} = \overline{\inner{\alpha^*(w)}{v}} =$ \\
|
|
|
|
$\overline{\inner{w}{(\alpha^*)^{{}^*}(v)}} = \inner{(\alpha^*)^{{}^*}(v)}{w} \; \forall v \in V, w \in W$ \\
|
|
|
|
$\implies \inner{\alpha(v) - (\alpha^*)^{{}^*}(v)}{w} = 0 \; \forall v \in V, w \in W, \;
|
2022-10-05 18:46:19 +02:00
|
|
|
w:= \alpha(v) - (\alpha^*)^{{}^*}(v)$ \\
|
|
|
|
$\implies \inner{\alpha(v) - (\alpha^*)^{{}^*}(v)}{\alpha(v) - (\alpha^*)^{{}^*}(v)} = 0 \iff
|
2023-03-28 11:46:57 +02:00
|
|
|
\norm{\alpha(v) - (\alpha^*)^{{}^*}(v)} = 0 \implies \forall v \in V\colon \alpha(v) = (\alpha^*)^{{}^*}(v)$
|
2022-06-13 10:53:40 +02:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
2022-10-05 18:46:19 +02:00
|
|
|
\inner{(\alpha + \beta)(v)}{w} & = \inner{v}{(\alpha + \beta)^*(w)} = \inner{\alpha(v) + \beta(v)}{w} \\
|
|
|
|
& = \inner{\alpha(v)}{w} + \inner{\beta(v)}{w} = \inner{v}{\alpha^*(w)} +
|
|
|
|
\inner{v}{\beta^*(w)} \\
|
|
|
|
& = \inner{v}{\alpha^*(w) + \beta^*(w)}
|
2022-06-13 10:53:40 +02:00
|
|
|
\end{align*}
|
2022-06-08 23:25:28 +02:00
|
|
|
\item $ \inner{(\lambda \alpha)(v)}{w} = \inner{v}{(\lambda \alpha)^*(w)} $\\
|
|
|
|
$ = \lambda \inner{\alpha(v)}{w} = \lambda \inner{v}{\alpha^*(w)} = \inner{v}{\overline{\lambda} \alpha^*(w)}$
|
|
|
|
\item $ \inner{\beta \circ \alpha(v)}{w} = \inner{\alpha(v)}{\beta^*(w)} = \inner{v}{\alpha^* \circ \beta^*(w)}$ \\
|
|
|
|
$ = \inner{v}{(\beta \circ \alpha)^*(w)} $
|
|
|
|
\item Sei $E$ Orthonormalbasis, $A = {}_E M(\alpha)_E = (a_{ij}) \in \C^{\nxn}$
|
|
|
|
\[
|
|
|
|
\overline{\det(\alpha)} = \sum_{\pi \in S_n} \sgn(\pi) \overline{a}_{1\pi(1)} \cdots \overline{a}_{n\pi(n)}
|
|
|
|
= \det(\overline{A}) = \det(\overline A^T) = \det(A^*)
|
|
|
|
\]
|
|
|
|
$ =\det({}_E M(\alpha^*)_E) = \det(\alpha^*) $ %Eigentlich noch in dem oberen environment
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-05-12 09:32:32 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
$\alpha \in \homkv$ mit $V$ euklidisch/unitär heißt \underline{normal}, wenn $\alpha^*$ existiert und
|
|
|
|
\[
|
|
|
|
\alpha \circ \alpha^* = \alpha^* \circ \alpha
|
|
|
|
\]
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.2.7}
|
2022-05-12 09:32:32 +02:00
|
|
|
\[
|
|
|
|
\alpha \text{ normal} \iff \inner{\alpha(v)}{\alpha(w)} = \inner{\alpha^*(v)}{\alpha^*(w)}
|
|
|
|
\]
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$\implies$:]$\inner{\alpha(v)}{\alpha(w)} = \inner{v}{\alpha^*(\alpha(w))}
|
|
|
|
\underbrace{=}_{\alpha \text{ normal}} \inner{v}{\alpha(\alpha^*(w))}$ \\
|
2022-06-30 13:04:01 +02:00
|
|
|
$ \inner{\alpha^*(v)}{\alpha^*(w)} = \inner{v}{(\alpha^*)^{{}^*} (\alpha^*(w))} = \inner{v}{\alpha(\alpha^*(w))}$
\item[$\impliedby$:] Für alle $v, w \in V$ gilt
$\inner{v}{\alpha^*(\alpha(w))} = \inner{\alpha(v)}{\alpha(w)} =
\inner{\alpha^*(v)}{\alpha^*(w)} = \inner{v}{\alpha(\alpha^*(w))}$, also
$\inner{v}{(\alpha^* \circ \alpha - \alpha \circ \alpha^*)(w)} = 0$ für alle $v \in V$.
Mit $v := (\alpha^* \circ \alpha - \alpha \circ \alpha^*)(w)$ folgt
$\alpha^* \circ \alpha = \alpha \circ \alpha^*$, also ist $\alpha$ normal.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-05-12 09:32:32 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-12 09:32:32 +02:00
|
|
|
\[
|
|
|
|
\alpha \text{ normal } \implies \ker(\alpha) = \ker(\alpha^*)
|
|
|
|
\]
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{align*}
|
|
|
|
& \norm{\alpha(v)}^2 = \inner{\alpha(v)}{\alpha(v)} = \inner{\alpha^*(v)}{\alpha^*(v)} = \norm{\alpha^*(v)}^2 \\
|
|
|
|
& v \in \ker(\alpha) \iff \alpha(v) = 0 \iff \norm{\alpha(v)} = 0 \iff \norm{\alpha^*(v)} = 0 \\
|
|
|
|
& \iff \alpha^*(v) = 0 \iff v \in \ker(\alpha^*)
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-05-12 09:32:32 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.2.9}
|
2022-05-12 09:32:32 +02:00
|
|
|
$\alpha$ normal:
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\alpha$ und $\alpha^*$ besitzen die selben Eigenvektoren.
|
|
|
|
\item $v \in \eig_\alpha(\lambda)\implies v \in \eig_{\alpha^*}(\overline \lambda)$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
$v \in \eig_\alpha(\lambda)$
|
|
|
|
\begin{align*}
|
|
|
|
\norm{\alpha(v) - \lambda v}^2 & = \inner{\alpha(v) - \lambda v}{\alpha(v) - \lambda v} \\
|
|
|
|
& = \inner{\alpha(v)}{\alpha(v)} - \lambda \inner{v}{\alpha(v)} -
|
|
|
|
\overline\lambda \inner{\alpha(v)}{v} + \lambda \overline \lambda
|
|
|
|
\inner vv \\
|
|
|
|
& \underbrace{=}_{\alpha\text{ normal}}
|
|
|
|
\inner{\alpha^*(v)}{\alpha^*(v)} - \lambda \overline{\inner{\alpha(v)}{v}}
|
|
|
|
-\overline{\lambda} \inner{v}{\alpha^*(v)} + \lambda \overline \lambda
|
|
|
|
\inner vv \\
|
|
|
|
& = \inner{\alpha^*(v)}{\alpha^*(v)} - \lambda \overline{\inner{v}{\alpha^*(v)}}
|
|
|
|
- \overline \lambda \inner{v}{\alpha^*(v)}
|
|
|
|
+ \lambda \overline \lambda \inner vv \\
|
|
|
|
& =\inner{\alpha^*(v)}{\alpha^*(v)} - \lambda \inner{\alpha^*(v)}{v}
|
|
|
|
- \overline \lambda \inner{v}{\alpha^*(v)} + \lambda \overline\lambda
|
|
|
|
\inner vv \\
|
|
|
|
& = \inner{\alpha^*(v) - \overline \lambda v}{\alpha^*(v) - \overline{\lambda}
|
|
|
|
v} = \norm{\alpha^*(v) - \overline \lambda v}^2
|
|
|
|
\end{align*}
|
|
|
|
\begin{align*}
|
|
|
|
& v \in \eig_\alpha(\lambda) \iff \alpha(v) - \lambda v = 0 \iff \norm{\alpha(v) - \lambda v}^2 = 0 \\
|
|
|
|
& \iff \norm{\alpha^*(v) - \overline \lambda v}^2 = 0 \iff \alpha^*(v) - \overline \lambda v = 0
|
|
|
|
\iff v \in \eig_{\alpha^*}(\overline\lambda)
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-05-12 09:32:32 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Spektralsatz für normale Abbildungen]
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.2.10}
|
2022-05-12 09:32:32 +02:00
|
|
|
$\alpha \in \Hom_\C(V, V), V$ unitär mit $\dim(V) = n < \infty$. Dann gilt:
|
|
|
|
\[
|
|
|
|
\exists \text{ Orthonormalbasis aus Eigenvektoren von } \alpha \iff \alpha \text{ normal}
|
|
|
|
\]
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{satz}
|
|
|
|
\begin{proof}
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$\impliedby$:] $\alpha$ normal
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$n=1$:] $\exists$ Eigenvektor $e_1 \in V \setminus\{0\}$ mit $\alpha(e_1) = \lambda e_1$.\\
|
|
|
|
o.B.d.A.: $\norm{e_1} = 1 \implies (e_1)$ ist Orthonormalbasis aus Eigenvektoren
|
|
|
|
\item[$n-1 \to n$:] $\exists$ Eigenvektor $e_1 \in V \setminus\{0\}$ mit $\alpha(e_1) = \lambda e_1$.\\
|
2022-06-30 13:04:01 +02:00
|
|
|
o.B.d.A.: $\norm{e_1} = 1, \; U= \linspan{ e_1 } ^\bot$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{itemize}
|
2022-06-13 11:29:12 +02:00
|
|
|
\item $V = \linspan{ e_1 } \oplus U, \alpha(U) \overset{\text{!}}{\subseteq} U,
|
|
|
|
\alpha(\linspan{e_1}) \overset{\checkmark}{\subseteq} \linspan{e_1 }$
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{itemize}
|
2022-06-13 11:29:12 +02:00
|
|
|
$\implies \alpha = \alpha|_{\linspan{ e_1 }} \oplus \alpha|_U$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
Sei $v \in U\implies 0 = \inner{v}{e_1}
|
|
|
|
\;\;\; [e_1 \in \eig_\alpha(\lambda) \iff e_1 \in \eig_{\alpha^*}(\overline\lambda)]$
|
|
|
|
\begin{align*}
|
|
|
|
\inner{\alpha(v)}{e_1} & = \inner{v}{\alpha^*(e_1)} = \inner{v}{\overline \lambda e_1} \\
|
|
|
|
& = \lambda \inner{v}{e_1} = 0 \\
|
|
|
|
\implies \alpha(v) \in U & \implies \alpha(U) \subseteq U \checkmark \\
|
|
|
|
& \implies \alpha|_U \in \Hom(U, U), \dim(U) = n-1 \\
|
|
|
|
\end{align*}
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& \overset{\mathclap{\substack{\text{Induktionsvoraussetzung} \\
|
|
|
|
|}}}
|
2022-06-08 23:25:28 +02:00
|
|
|
{\implies} \exists
|
|
|
|
\text{ ONB } (e_2, \dots, e_n) \text{ von $U$ aus Eigenvektoren von } \alpha \\
|
|
|
|
& \implies (e_1, \dots, e_n) \text{ ist ONB von $V$ aus Eigenvektoren von } \alpha
|
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
|
2022-06-09 11:00:51 +02:00
|
|
|
\item[$\implies$:] Sei $(e_1, \dots, e_n)$ Orthonormalbasis aus Eigenvektoren von $\alpha$. Seien \\
|
|
|
|
weiters $\lambda_1, \dots, \lambda_n \in \C$ die zugehörigen Eigenwerte.
|
2022-06-08 23:25:28 +02:00
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\alpha\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
V & \to V \\
|
|
|
|
v = \sum_{i=1}^n \mu_i e_i & \mapsto \sum_{i=1}^n \lambda_i \mu_i e_i
|
|
|
|
\end{cases}
|
2022-06-08 23:25:28 +02:00
|
|
|
\]
|
|
|
|
Definiere
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\beta\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
V & \to V \\
|
|
|
|
v = \sum_{i=1}^n \mu_i e_i & \mapsto \sum_{i=1}^n \overline \lambda_i
|
|
|
|
\mu_i e_i
|
|
|
|
\end{cases}
|
|
|
|
\implies \beta = \alpha^*
|
2022-06-08 23:25:28 +02:00
|
|
|
\]
|
|
|
|
\[
|
|
|
|
\begin{aligned}
|
|
|
|
\alpha^*(\alpha(v)) & = \alpha^*( \sum_{i=1}^n \lambda_i \mu_i e_i )
|
|
|
|
= \sum_{i=1}^n \overline\lambda_i \lambda_i \mu_i e_i
|
2022-06-13 10:53:40 +02:00
|
|
|
= \sum_{i=1}^n \abs{ \lambda_i }^2 \mu_i e_i \\
|
2022-06-08 23:25:28 +02:00
|
|
|
\alpha(\alpha^*(v)) & = \alpha( \sum_{i=1}^n \overline\lambda_i \mu_i e_i )
|
|
|
|
= \sum_{i=1}^n \lambda_i \overline\lambda_i \mu_i e_i
|
2022-06-13 10:53:40 +02:00
|
|
|
= \sum_{i=1}^n \abs{ \lambda_i }^2 \mu_i e_i
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{aligned}
|
|
|
|
\implies \alpha \text{ normal}
|
|
|
|
\]
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
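Ein kleines numerisches Beispiel dazu (Python mit NumPy, nur zur Illustration):
die Drehung um 90 Grad ist normal, und die über $\C$ berechneten Eigenvektoren
zu den Eigenwerten $\pm i$ stehen aufeinander senkrecht.
\begin{verbatim}
import numpy as np

A = np.array([[0., -1.], [1., 0.]])          # Drehung um 90 Grad
print(np.allclose(A @ A.T, A.T @ A))         # A A* = A* A, also normal

lam, U = np.linalg.eig(A.astype(complex))
print(lam)                                   # Eigenwerte i und -i
print(np.allclose(np.vdot(U[:, 0], U[:, 1]), 0))   # Eigenvektoren orthogonal
\end{verbatim}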
|
2022-05-11 11:47:48 +02:00
|
|
|
|
2022-05-18 12:23:41 +02:00
|
|
|
\subsubsection{Bemerkung}
|
2023-01-31 13:30:38 +01:00
|
|
|
Im Reellen/Euklidischen Fall gilt dieser Satz genau dann, wenn $\alpha$
|
|
|
|
diagonalisierbar ist.
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-25 14:31:20 +02:00
|
|
|
\label{theo:3.2.11}
|
2022-05-18 12:23:41 +02:00
|
|
|
Sei $V$ ein unitärer Vektorraum mit $\dim(V) = n < \infty$ und $\alpha \in \Hom_\C(V, V)$ selbstadjungiert.
|
|
|
|
(Das heißt $\alpha = \alpha^*$) \\
|
|
|
|
Dann gilt:
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Alle Eigenwerte von $\alpha$ sind reell.
|
|
|
|
\item $V$ besitzt eine Orthonormalbasis aus Eigenvektoren von $\alpha$.
|
|
|
|
\item Eigenvektoren zu unterschiedlichen Eigenwerten sind orthogonal.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
$\alpha$ ist normal: $\alpha \circ \alpha^* = \alpha \circ \alpha = \alpha^* \circ \alpha$
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Sei $\lambda$ Eigenwert von $\alpha$ mit Eigenvektor $v\in V\setminus\{0\}$\\
|
2023-01-31 13:30:38 +01:00
|
|
|
$\overset{\text{\ref{theo:3.2.7}}}{\implies} v$ ist Eigenvektor von $\alpha^*$
|
|
|
|
mit Eigenwert $\overline{\lambda}$ \\ $\implies \lambda v = \alpha(v) =
|
|
|
|
\alpha^*(v) = \overline{\lambda} v \implies (\lambda - \overline{\lambda}) v =
|
|
|
|
0 \overset{v\neq0}{\implies} \lambda = \overline{\lambda}$ \\ $\implies \lambda
|
|
|
|
\in \R$
|
2022-06-08 23:25:28 +02:00
|
|
|
\item Folgt direkt aus Satz \ref{theo:3.2.10} \& $\alpha$ normal.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $\alpha(v_1) = \lambda_1 v_1, \alpha(v_2)=\lambda_2 v_2, \lambda_1 \neq
|
|
|
|
\lambda_2$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
|
|
|
& \lambda_1 \inner{v_1}{v_2} = \inner{\lambda_1 v_1}{v_2} = \inner{\alpha(v_1)}{v_2} =
|
|
|
|
\inner{v_1}{\alpha^*(v_2)} = \inner{v_1}{\alpha(v_2)} \\
|
|
|
|
& = \inner{v_1}{\lambda_2 v_2} = \overline{\lambda_2} \inner{v_1}{v_2} = \lambda_2 \inner{v_1}{v_2} \\
|
|
|
|
& \implies \underbrace{(\lambda_1 - \lambda_2)}_{\neq 0} \inner{v_1}{v_2} = 0 \implies \inner{v_1}{v_2}=0
|
|
|
|
\end{align*}
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
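Für selbstadjungierte Matrizen liefert etwa NumPys \texttt{eigh} reelle
Eigenwerte und eine Orthonormalbasis aus Eigenvektoren. Eine kleine Skizze
(Beispielmatrix frei gewählt, nur zur Illustration):
\begin{verbatim}
import numpy as np

A = np.array([[2., 1. - 1.j],
              [1. + 1.j, 3.]])
print(np.allclose(A, A.conj().T))              # selbstadjungiert: A = A*

lam, U = np.linalg.eigh(A)
print(lam)                                     # [1. 4.], alle Eigenwerte reell
print(np.allclose(U.conj().T @ U, np.eye(2)))  # Spalten von U: ONB
print(np.allclose(U @ np.diag(lam) @ U.conj().T, A))
\end{verbatim}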
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $V$ ein euklidischer Vektorraum und $\alpha \in \Hom_\R(V, V)$ normal. Dann
|
|
|
|
ist $\alpha_\C \in \Hom_\C (V_\C, V_\C)$ auch normal.
|
2022-05-18 12:23:41 +02:00
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $\alpha \in \Hom(V, V), \alpha_\C$ die komplexe Erweiterung von $\alpha$. Seien weiters $v, v' \in V$ mit
|
|
|
|
$v = u+iw, v' = u' +iw', u, w, u', w' \in \R$.
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.2.12.1}
|
|
|
|
\begin{split}
|
|
|
|
\inner{\alpha_\C(v)}{\alpha_\C(v')} & = \inner{\alpha(u) + i\alpha(w)}{\alpha(u') + i\alpha(w')} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
=\inner{\alpha(u)}{\alpha(u')} +
|
2022-06-08 23:25:28 +02:00
|
|
|
i \inner{\alpha(w)}{\alpha(u')}
|
|
|
|
- i \inner{\alpha(u)}{\alpha(w')} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
+ (-i)(-i)\inner{\alpha(w)}{\alpha(w')}
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \inner{\alpha^*(u)}{\alpha^*(u')} + i \inner{\alpha^*(w)}{\alpha^*(u')}
|
2022-06-08 23:25:28 +02:00
|
|
|
- i \inner{\alpha^*(u)}{\alpha^*(w')} \\
|
2023-01-31 13:30:38 +01:00
|
|
|
+ (-i)(-i)\inner{\alpha^*(w)}{\alpha^*(w')}
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
2022-06-08 23:25:28 +02:00
|
|
|
& = \inner{\alpha^*(u)+i\alpha^*(w)}{\alpha^*(u')} +
|
|
|
|
\inner{\alpha^*(u) + i\alpha^*(w)}{i\alpha^*(w')} \\
|
|
|
|
& = \inner{\alpha^*(u) + i\alpha^*(w)}{\alpha^*(u') + i\alpha^*(w')} \\
|
|
|
|
& = \inner{(\alpha^*)_\C(v)}{(\alpha^*)_\C(v')}
|
|
|
|
\end{split}
|
|
|
|
\end{equation}
|
|
|
|
Bleibt zu Zeigen, dass $(\alpha^*)_\C = (\alpha_\C)^*$:
|
|
|
|
\begin{align*}
|
|
|
|
\inner{\alpha_\C(v)}{v'} & = \inner{\alpha(u) + i \alpha(w)}{u' + i w'} \\
|
|
|
|
& = \inner{\alpha(u)}{u'} + i \inner{\alpha(w)}{u'} -i \inner{\alpha(u)}{w'} +
|
|
|
|
i (-i) \inner{\alpha(w)}{w'} \\
|
|
|
|
& = \inner{u}{\alpha^*(u')} + i \inner{w}{\alpha^*(u')}
|
|
|
|
-i \inner{u}{\alpha^*(w')} +i (-i) \inner{w}{\alpha^*(w')} \\
|
|
|
|
& = \inner{u+iw}{\alpha^*(u')} + \inner{u+iw}{i\alpha^*(w')} \\
|
|
|
|
& = \inner{u + iw}{\alpha^*(u') + i \alpha^*(w')} = \inner{v}{(\alpha^*)_\C(v')}
|
|
|
|
\end{align*}
|
|
|
|
Das heißt $(\alpha^*)_\C$ ist tatsächlich die adjungierte Abbildung von $\alpha_\C$ und mit
|
|
|
|
\ref{eq:3.2.12.1} folgt nach Satz \ref{theo:3.2.7}, dass $\alpha_\C$ normal ist.
|
|
|
|
\end{proof}
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.2.13}
|
2022-05-18 12:23:41 +02:00
|
|
|
Sei $V$ ein euklidischer Vektorraum und $\alpha \in \Hom_\R(V, V)$ normal. Sei $v_\C \in V_\C$
|
|
|
|
$\underbrace{\text{normierter}}_{\text{d.h. }\norm{v_\C}_{V_\C}=1}$ Eigenvektor von $\alpha_\C$ zum Eigenwert
|
|
|
|
$\lambda \in \C\setminus \R$. \\
|
|
|
|
Dann ist $\overline{v_\C}$ normierter Eigenvektor von $\alpha_\C$ zu Eigenwert $\overline{\lambda}$.
|
|
|
|
Insbesondere sind $v_\C, \overline{v_\C}$ orthogonal.
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $u + iw \in V_\C$ mit $u, w\in V$. $\alpha_\C(u + iw) = \alpha(u) + i\alpha(w)$ \\
|
|
|
|
$v_\C = u + iw$ ist normiert
|
|
|
|
\begin{align*}
|
|
|
|
\implies 1 = \inner{u+iw}{u+iw} & = \inner uu_V + i \inner wu_V - i \inner uw_V + \inner ww_V \\
|
|
|
|
& = \inner uu_V + i \inner wu_V - i \inner wu_V + \inner ww_V \\
|
|
|
|
& = \inner uu_V + \inner ww_V \\
|
|
|
|
\implies \inner{u - iw}{u - iw} & = \inner uu_V + \inner{-w}{-w}_V = \inner uu_V + \inner ww_V = 1 \\
|
|
|
|
\implies \norm{\overline{v_\C}} = 1
|
|
|
|
\end{align*}
|
|
|
|
$\lambda = \gamma + i \delta$
|
|
|
|
\[
|
|
|
|
\alpha_\C(v_\C) = \lambda v_\C \implies \alpha(u) + i \alpha(w) = (\gamma + i \delta) (u + iw) =
|
|
|
|
(\gamma u - \delta w) + i(\delta u + \gamma w)
|
|
|
|
\]
|
|
|
|
\[
|
|
|
|
\begin{aligned}
|
|
|
|
\alpha(u) = \gamma u - \delta w \\
|
|
|
|
\alpha(w) = \delta u + \gamma w
|
|
|
|
\end{aligned}
|
|
|
|
\implies
|
|
|
|
\begin{aligned}
|
|
|
|
\alpha_\C (\overline{v_\C}) = \alpha(u) - i \alpha(w) & = (\gamma u - \delta w) - i
(\delta u + \gamma w) \\
|
|
|
|
& = (\gamma - i \delta)(u - iw)
|
|
|
|
= \overline{\lambda}\overline{v_\C}
|
|
|
|
\end{aligned}
|
|
|
|
\]
|
|
|
|
$\implies \overline{v_\C}$ ist Eigenvektor von $\alpha_\C$ zum Eigenwert $\overline\lambda$
|
|
|
|
$\overset{\text{\ref{theo:3.2.9}}}{\implies} \overline{v_\C}$ ist Eigenvektor von $\alpha^*_\C$ zum
|
|
|
|
Eigenwert $\lambda$.
|
|
|
|
\begin{align*}
|
|
|
|
& \lambda \inner{v_\C}{\overline{v_\C}} = \inner{\alpha_\C(v_\C)}{\overline{v_\C}} =
|
|
|
|
\inner{v_\C}{\alpha^*_\C(\overline{v_\C})} = \inner {v_\C}{\lambda \overline{v_\C}}
|
|
|
|
= \overline{\lambda} \inner{v_\C}{\overline{v_\C}} \\
|
|
|
|
& \implies (\underbrace{\lambda - \overline\lambda}
|
|
|
|
_{\mathrlap{\neq 0\text{, weil } \lambda \in \C \setminus \R}})
|
|
|
|
\inner{v_\C}{\overline{v_\C}}= 0 \implies \inner{v_\C}{\overline{v_\C}} = 0
|
|
|
|
\end{align*}
|
|
|
|
|
|
|
|
\end{proof}
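\subsubsection{Beispiel}
Zur Illustration von Lemma \ref{theo:3.2.13}: Sei $\alpha \in \Hom_\R(\R^2, \R^2)$ die Drehung um den Winkel
$\varphi \in (0, \pi)$, also $\alpha(x) = \left(\begin{smallmatrix} \cos\varphi & -\sin\varphi \\ \sin\varphi & \cos\varphi \end{smallmatrix}\right) x$
bezüglich des Standardskalarprodukts. $\alpha$ ist normal (es gilt $\alpha^* \alpha = \alpha \alpha^* = \id$),
und $\alpha_\C$ hat die Eigenwerte $e^{\pm i\varphi} \in \C \setminus \R$. Die Vektoren
\[
	v_\C = \frac{1}{\sqrt 2}
	\begin{pmatrix}
		1 \\ -i
	\end{pmatrix}
	, \qquad
	\overline{v_\C} = \frac{1}{\sqrt 2}
	\begin{pmatrix}
		1 \\ i
	\end{pmatrix}
\]
sind normierte Eigenvektoren zu $e^{i\varphi}$ bzw. $e^{-i\varphi}$, und tatsächlich gilt
$\inner{v_\C}{\overline{v_\C}} = \frac 12 (1 \cdot \overline 1 + (-i) \cdot \overline i) = \frac 12 (1 - 1) = 0$.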
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-18 15:02:32 +02:00
|
|
|
\label{theo:3.2.14}
|
2022-05-18 12:23:41 +02:00
|
|
|
Sei $V$ ein euklidischer Vektorraum mit $\dim(V) = n < \infty$
|
|
|
|
\[
|
|
|
|
\alpha \in \Hom_\R(V, V) \text{ normal} \iff \exists \text{ ONB } B = (e_1, \dots, e_n) \text{ von } V
|
|
|
|
\text{ mit }
|
|
|
|
\]
|
|
|
|
{\setcounter{MaxMatrixCols}{20}
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
{}_B M(\alpha)_B =
|
|
|
|
\begin{pmatrix}
|
2022-05-18 12:23:41 +02:00
|
|
|
\lambda_1 \\
|
|
|
|
& \lambda_2 \\
|
|
|
|
& & \ddots \\
|
|
|
|
& & & \lambda_k \\
|
|
|
|
& & & & \tl \gamma_1 & \mathllap{-}\delta_1 \\
|
|
|
|
& & & & \delta_1 & \gamma_1 \br \\
|
|
|
|
& & & & & & \tl \gamma_2 & \mathllap{-}\delta_2 \\
|
|
|
|
& & & & & & \delta_2 & \gamma_2 \br \\
|
|
|
|
& & & & & & & & \ddots \\
|
|
|
|
& & & & & & & & & \tl \gamma_r & \mathllap{-}\delta_r \\
|
|
|
|
& & & & & & & & & \delta_r & \gamma_r \br
|
|
|
|
\end{pmatrix}
|
|
|
|
\]
|
|
|
|
}
|
|
|
|
wobei $\spec(\alpha_\C) = \{\underbrace{\lambda_1, \dots, \lambda_k}_{\in \R},
|
|
|
|
\underbrace{\lambda_{k+1}, \dots, \lambda_{k+r}}_{\in\C\setminus\R} \}$ und
|
|
|
|
$\lambda_{k+j} = \gamma_j + i \delta_j$
|
|
|
|
\subsubsection{Bemerkung}
|
2023-01-31 13:30:38 +01:00
|
|
|
Jedem Kästchen $\eta(\gamma, \delta)$ entspricht ein Paar $\lambda,
|
|
|
|
\overline\lambda$ konjugiert komplexer Eigenwerte von $\alpha_\C$. $\gamma =
|
|
|
|
\real(\lambda), \delta = \Im(\lambda)$
|
2022-05-18 12:23:41 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-10 00:29:02 +02:00
|
|
|
\leavevmode
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{itemize}
|
|
|
|
\item[$n=1$:] \checkmark
|
|
|
|
\item[$n-1 \to n$:] Wenn $\alpha$ einen reellen Eigenwert besitzt, kann man wie im Beweis von Satz \ref{theo:3.2.10}
|
|
|
|
vorgehen. Wenn nicht: Sei $v_\C \in V_\C$ ein Eigenvektor von $\alpha_\C$ zum Eigenwert
|
|
|
|
$\lambda = \delta + i\gamma \in \C \setminus \R$.\\
|
|
|
|
Lemma \ref{theo:3.2.13}: $\overline{v_\C}$ ist Eigenvektor von $\alpha_\C$ zum Eigenwert $\overline\lambda$
|
|
|
|
und \\ $\inner{v_\C}{\overline{v_\C}} = 0$ \\
|
|
|
|
Setze
|
|
|
|
\[
|
|
|
|
\begin{rcases}
|
|
|
|
a = \frac{1}{\sqrt2}(v_\C + \overline{v_\C}) \in V & v_\C = u + iw \\
|
|
|
|
b = \frac{1}{i\sqrt2}(v_\C - \overline{v_\C}) \in V & \overline{v_\C} = u - iw
|
|
|
|
\end{rcases}
|
2023-01-31 13:30:38 +01:00
|
|
|
\implies
|
|
|
|
\begin{aligned}
|
2022-06-08 23:25:28 +02:00
|
|
|
v_\C + \overline{v_\C} = 2u \\
|
|
|
|
\frac 1i(v_\C - \overline{v_\C}) = 2w
|
|
|
|
\end{aligned}
|
|
|
|
\]
|
|
|
|
\tl UE\br $\implies \norm a = \norm b = 1$ und $\inner ab = 0$ \\
|
|
|
|
Weiters
|
|
|
|
\[
|
|
|
|
\begin{aligned}
|
|
|
|
\alpha(a) & = \frac{1}{\sqrt{2}}(\alpha_\C(v_\C) + \alpha_\C(\overline {v_\C}))
|
|
|
|
= \frac{1}{\sqrt{2}}(\lambda v_\C + \overline \lambda \overline {v_\C}) \\
|
|
|
|
& = \frac{1}{\sqrt{2}}((\delta + i \gamma)(u + iw) + (\delta - i \gamma)(u - iw)) \\
|
|
|
|
& = \frac{1}{\sqrt{2}}((\delta u - \gamma w) + i(\delta w + \gamma u) +
|
|
|
|
(\delta u - \gamma w) - i(\delta w + \gamma u)) \\
|
|
|
|
& = \frac{1}{\sqrt{2}}(\delta 2 u - \gamma 2 w)
|
|
|
|
= \delta\frac{2u}{\sqrt{2}} - \gamma \frac{2w}{\sqrt{2}} \\
|
|
|
|
& = \delta a - \gamma b \\
|
|
|
|
\alpha(b) & = \frac{1}{i\sqrt{2}}(\alpha_\C(v_\C) - \alpha_\C(\overline{v_\C}))
|
|
|
|
= \frac{1}{i\sqrt{2}}(\lambda v_\C - \overline \lambda \overline{v_\C}) \\
|
|
|
|
& = \frac{1}{i\sqrt{2}}((\delta + i \gamma)(u + iw) - (\delta - i \gamma)(u - iw)) \\
|
|
|
|
& = \frac{1}{i\sqrt{2}}((\delta u - \gamma w) + i(\delta w + \gamma u) -
|
|
|
|
((\delta u - \gamma w) - i(\delta w + \gamma u))) \\
|
|
|
|
& = \frac{1}{i\sqrt{2}}(2i \delta w + 2i \gamma u)
|
|
|
|
= \delta \frac{2iw}{i\sqrt{2}} + \gamma \frac{2iu}{i\sqrt{2}}
|
|
|
|
= \delta \frac{2w}{\sqrt{2}} + \gamma \frac{2u}{\sqrt{2}} \\
|
|
|
|
& = \delta b + \gamma a
|
|
|
|
\end{aligned}
|
|
|
|
\]
|
|
|
|
\item[$\impliedby$:] Da $B$ Orthonormalbasis ist, folgt aus Satz \ref{theo:3.2.4} und
|
|
|
|
$\lambda_i, \gamma_i, \delta_i \in \R$, dass
|
|
|
|
${}_B M(\alpha^*)_B = {}_B M(\alpha)_B^* = \overline{{}_B M(\alpha)_B}^T = {}_B {M(\alpha)_B}^T$. Da
|
|
|
|
${}_B M(\alpha^*)_B$ eine Blockdiagonalmatrix ist, reicht es aus, die multiplikative Kommutativität für
|
|
|
|
die einzelnen Blöcke zu zeigen:
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& \lambda_i \lambda_i = \lambda_i \lambda_i \checkmark \\
|
|
|
|
&
|
|
|
|
\begin{pmatrix}
|
|
|
|
\gamma & -\delta \\
|
|
|
|
\delta & \gamma
|
|
|
|
\end{pmatrix}
|
|
|
|
\begin{pmatrix}
|
|
|
|
\gamma & \delta \\
|
|
|
|
-\delta & \gamma
|
|
|
|
\end{pmatrix}
|
|
|
|
=
|
|
|
|
\begin{pmatrix}
|
|
|
|
\gamma^2 + \delta^2 & 0 \\
|
|
|
|
0 & \delta^2 +\gamma^2
|
|
|
|
\end{pmatrix}
|
|
|
|
=
|
|
|
|
\begin{pmatrix}
|
|
|
|
\gamma & \delta \\
|
|
|
|
-\delta & \gamma
|
|
|
|
\end{pmatrix}
|
|
|
|
\begin{pmatrix}
|
|
|
|
\gamma & -\delta \\
|
|
|
|
\delta & \gamma
|
|
|
|
\end{pmatrix}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
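\subsubsection{Beispiel}
Die Drehung aus dem Beispiel zu Lemma \ref{theo:3.2.13} ist normal und besitzt für $\varphi \in (0, \pi)$ keinen
reellen Eigenwert. Ihre Matrix
\[
	\begin{pmatrix}
		\cos\varphi & -\sin\varphi \\
		\sin\varphi & \cos\varphi
	\end{pmatrix}
\]
hat bereits die Gestalt aus Satz \ref{theo:3.2.14} mit $k = 0$, $r = 1$, $\gamma_1 = \cos\varphi$ und
$\delta_1 = \sin\varphi$; das zugehörige Paar konjugiert komplexer Eigenwerte von $\alpha_\C$ ist
$\lambda_1 = \gamma_1 + i\delta_1 = e^{i\varphi}$ und $\overline{\lambda_1} = e^{-i\varphi}$.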
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-18 12:23:41 +02:00
|
|
|
Sei $V$ ein euklidischer/unitärer Vektorraum, $\alpha \in \homkv$ \\
|
|
|
|
anti-selbstadjungiert. Dann gilt
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-06-22 10:05:40 +02:00
|
|
|
\item $\lambda \in \spec(\alpha) \implies \real(\lambda) = 0$
|
2022-05-18 12:23:41 +02:00
|
|
|
\item $\alpha_\C$ besitzt eine Orthonormalbasis aus Eigenvektoren.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Ist $V$ euklidisch, so sind die Diagonalelemente der Matrix ${}_B M(\alpha)_B$
|
|
|
|
gleich $0$, wobei $B$ die Basis aus Satz \ref{theo:3.2.14} ist.
|
2022-05-18 12:23:41 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\alpha$ ist normal $\implies$ wegen Satz \ref{theo:3.2.9}
|
|
|
|
$v \in \eig_\alpha(\lambda)\implies v \in \eig_{\alpha^*}(\overline \lambda)$
|
|
|
|
Mit $0 \neq v \in \eig_{\alpha}(\lambda)$:
|
|
|
|
\[
|
|
|
|
\alpha(v) = \lambda v = -\alpha^*(v) = -\overline \lambda v \implies \lambda = -\overline \lambda
|
2022-06-22 10:05:40 +02:00
|
|
|
\implies \real (\lambda) = 0
|
2022-06-08 23:25:28 +02:00
|
|
|
\]
|
|
|
|
\item $\alpha$ ist normal, $\alpha^*= -\alpha$
|
|
|
|
\item Folgt aus dem Satz~\ref{theo:3.2.14}, sowie a).
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
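\subsubsection{Beispiel}
Für $\alpha \in \Hom_\R(\R^2, \R^2)$ mit $\alpha(x) = \left(\begin{smallmatrix} 0 & -1 \\ 1 & 0 \end{smallmatrix}\right) x$
gilt $\alpha^* = -\alpha$, $\alpha$ ist also anti-selbstadjungiert. Es ist $\spec(\alpha_\C) = \{i, -i\}$,
alle Eigenwerte haben also Realteil $0$, und die Diagonalelemente der darstellenden Matrix sind $0$,
wie in a) und c) behauptet.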
|
2022-05-18 12:23:41 +02:00
|
|
|
|
|
|
|
\section{Orthogonale und unitäre Abbildungen}
|
|
|
|
|
|
|
|
\begin{defin}
|
2022-06-09 11:00:51 +02:00
|
|
|
Seien $V, W$ euklidische/unitäre Vektorräume. $\alpha \in \Hom(V, W)$ heißt\\ \underline{orthogonal}/%
\underline{unitär}, wenn
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\forall v, w \in V\colon \inner{\alpha(v)}{\alpha(w)}_W = \inner vw_V
|
2022-05-18 12:23:41 +02:00
|
|
|
\]
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
Das sind genau die längen- und winkelerhaltenden Abbildungen.
|
|
|
|
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{satz}
|
|
|
|
\label{theo:3.3.2}
|
2022-05-19 09:42:55 +02:00
|
|
|
Seien $V, W$ euklidische/unitäre Vektorräume und $\alpha \in \Hom(V, W)$. Dann sind äquivalent:
|
2022-05-18 15:02:32 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\alpha$ ist orthogonal/unitär
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\forall v \in V\colon \norm v_V = 1 \implies \norm{\alpha(v)}_W = 1$
|
|
|
|
\item $\forall v \in V\colon \norm v_V = \norm{\alpha(v)}_W$
|
2022-05-19 09:42:55 +02:00
|
|
|
\item $( e_1, \dots, e_n ) \subseteq V \text{ ONS } \implies
|
|
|
|
( \alpha(e_1), \dots, \alpha(e_n) ) \subseteq W \text{ ONS.}$
|
2022-05-18 15:02:32 +02:00
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-10 00:29:02 +02:00
|
|
|
\leavevmode
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{itemize}
|
|
|
|
\item[a) $\implies$ b):] \checkmark
|
2023-03-28 11:46:57 +02:00
|
|
|
\item[b) $\implies$ c):] Es gilt für $v \in V\setminus \{0\}\colon$
|
|
|
|
$\norm{\frac{v}{\norm v}} = 1 \implies \norm{\frac{\alpha(v)}{\norm v}} = 1$\\
|
|
|
|
$\implies \norm{\alpha(v)} = \norm v$
|
2023-01-31 13:30:38 +01:00
|
|
|
\item[c) $\implies$ d):] $\inner vw = \frac 14(\norm{v+w}^2 - \norm{v-w}^2
|
|
|
|
+i\norm{v+iw}^2 - i\norm{v-iw}^2)$
|
2022-06-08 23:25:28 +02:00
|
|
|
\item[d) $\implies$ a):] Sei $v, w \in V$.
|
|
|
|
\begin{enumerate}[label=\arabic*. Fall:]
|
|
|
|
\item $v = 0 \implies \inner{\alpha(v)}{\alpha(w)} = \inner{0}{\alpha(w)} = 0 \checkmark$
|
|
|
|
\item $w = \lambda v \implies \inner{\alpha(w)}{\alpha(v)} = \lambda \inner{\alpha(v)}{\alpha(v)}
|
|
|
|
= \lambda \norm{\alpha(v)}^2$. \\
|
|
|
|
Sei $l := \frac{v}{\norm v} \overset{\text{d)}}{\implies} \alpha(l)$ ist ONS
|
|
|
|
$\implies \norm{\alpha(l)} = 1 \implies \norm{\alpha(v)} = \norm v$. \\
|
|
|
|
Es folgt $\inner{\alpha(v)}{\alpha(w)} = \inner vw \checkmark$.
|
2022-06-13 11:29:12 +02:00
|
|
|
\item $v, w$ linear unabhängig. Sei $(e_1, e_2)$ ONS mit $\linspan{\{e_1, e_2\}}
|
|
|
|
= \linspan{\{ v, w \}}$.
|
2022-06-08 23:25:28 +02:00
|
|
|
(Gram-Schmidt liefert Existenz)
|
|
|
|
\begin{align}
|
2023-01-31 13:30:38 +01:00
|
|
|
\implies & (\alpha(e_1), \alpha(e_2)) \text{ ist ONS} \nonumber \\
|
|
|
|
\nonumber
|
|
|
|
& v = \mu_1 e_1 + \mu_2 e_2 \\
|
|
|
|
\nonumber
|
|
|
|
& w = \tau_1 e_1 + \tau_2 e_2 \\
|
|
|
|
\nonumber
|
2022-06-08 23:25:28 +02:00
|
|
|
\implies & \alpha(v) = \mu_1 \alpha(e_1) + \mu_2 \alpha(e_2) \\
|
|
|
|
& \alpha(w) = \tau_1 \alpha(e_1) + \tau_2 \alpha(e_2) \label{eq:3.3.2.1}
|
|
|
|
\end{align}
|
|
|
|
\[
|
|
|
|
\underset{\text{\ref{theo:3.1.15}}}{\implies}
|
|
|
|
\inner vw = \mu_1 \overline{\tau_1} + \mu_2 \overline{\tau_2}
|
|
|
|
\underset{\mathclap{\substack{| \\ (\alpha(e_1), \alpha(e_2)) \text{ ONS \& \ref{eq:3.3.2.1}}}}}{=}
|
|
|
|
\inner{\alpha(v)}{\alpha(w)}
|
|
|
|
\]
|
|
|
|
\end{enumerate}
|
|
|
|
\end{itemize}
|
|
|
|
Beweis für $\inner vw = \frac 14(\norm{v+w}^2 - \norm{v-w}^2 +i\norm{v+iw}^2 - i\norm{v-iw}^2)$:
|
|
|
|
\begin{align*}
|
|
|
|
& \frac 14(\norm{v+w}^2 - \norm{v-w}^2 +i\norm{v+iw}^2 - i\norm{v-iw}^2) \\
|
2023-01-31 13:30:38 +01:00
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \frac 14 (\inner{v+w}{v+w} -
|
|
|
|
\inner{v-w}{v-w}) + \frac i4 (\inner{v+iw}{v+iw} \\- \inner{v-iw}{v-iw})
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \frac 14 (\cancel{\inner vv} + \inner vw + \inner wv + \cancel{\inner ww}
|
|
|
|
- \cancel{\inner vv} + \inner vw + \inner wv - \cancel{\inner ww}) \\
|
|
|
|
+ \frac i4 (\cancel{\inner vv} + \inner v{iw} + \inner {iw}v + \cancel{\inner {iw}{iw}}\\
|
|
|
|
- \cancel{\inner vv} - \inner v{-iw} - \inner {-iw}v - \cancel{\inner {-iw}{-iw}})
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
2022-06-08 23:25:28 +02:00
|
|
|
& = \frac 14 (2 \inner vw + 2 \inner wv) +
|
|
|
|
\frac i4 (-i \inner vw + i \inner wv -i \inner vw + i \inner wv) \\
|
|
|
|
& = \frac 14 (2 \inner vw + 2 \inner wv) + \frac 14 (\inner vw - \inner wv + \inner vw - \inner wv) \\
|
|
|
|
& = \frac 14 (2 \inner vw + 2 \inner wv + 2 \inner vw - 2 \inner wv) \\
|
|
|
|
& = \frac 14 4 \inner vw = \inner vw
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-05-18 15:02:32 +02:00
|
|
|
|
|
|
|
\begin{korollar}
|
2022-05-19 09:42:55 +02:00
|
|
|
\label{theo:3.3.3}
|
2022-05-18 15:02:32 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\alpha$ orthogonal $\implies \alpha_\C$ unitär
|
|
|
|
\item $\alpha$ orthogonal/unitär $\implies \alpha$ injektiv.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{korollar}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Folgt direkt aus Satz~\ref{theo:3.3.2}:
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
\underset{\substack{\rotatebox{90}{=} \\
|
|
|
|
u+iv}}{\norm{v_\C}}
|
2022-06-08 23:25:28 +02:00
|
|
|
= 1 & \iff \norm u^2 + \norm v^2 = 1 \\
|
|
|
|
& \implies \norm{\alpha_\C(v_\C)}^2 = \norm{\alpha(u)}^2 + \norm{\alpha(v)}^2 = 1
|
|
|
|
\end{align*}
|
|
|
|
\item $\alpha(v) = 0 \implies \norm{\alpha(v)} = 0 \implies \norm v = 0 \implies v = 0$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-05-18 15:02:32 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{itemize}
|
|
|
|
\item $A \in \R^{\nxn}$ heißt \underline{orthogonal}, wenn $A^{-1} = A^T$.
|
2022-05-19 09:42:55 +02:00
|
|
|
\item $A \in \C^{\nxn}$ heißt \underline{unitär}, wenn $A^{-1} = A^* = \overline{A}^T$.
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $O(n, \R) := \{ A \in \R^{\nxn}\colon \det(A)\neq 0 \land A^{-1} = A^T \}$
|
|
|
|
\item $U(n, \C) := \{ A \in \C^{\nxn}\colon \det(A)\neq 0 \land A^{-1} = A^* \}$
|
2022-05-18 15:02:32 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
2022-05-19 09:42:55 +02:00
|
|
|
\subsubsection{Beispiele}
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{pmatrix}
|
|
|
|
0 & 1 & 0 \\
|
|
|
|
1 & 0 & 0 \\
|
|
|
|
0 & 0 & 1
|
|
|
|
\end{pmatrix}
|
|
|
|
, \;
|
|
|
|
\begin{pmatrix}
|
|
|
|
\sin\varphi & \cos \varphi \\
|
|
|
|
-\cos\varphi & \sin \varphi
|
|
|
|
\end{pmatrix}
|
|
|
|
, \;
|
|
|
|
\frac 13
|
|
|
|
\begin{pmatrix}
|
|
|
|
2 & 1 & 2 \\
|
|
|
|
-2 & 2 & 1 \\
|
|
|
|
1 & 2 & -2
|
|
|
|
\end{pmatrix}
|
|
|
|
\text{orthogonal}
|
2022-05-19 09:42:55 +02:00
|
|
|
\]
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
\frac{1}{\sqrt{2}}
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & i \\
|
|
|
|
-i & -1
|
|
|
|
\end{pmatrix}
|
2022-05-19 09:42:55 +02:00
|
|
|
\text{unitär}
|
|
|
|
\]
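Die definierenden Bedingungen $A^{-1} = A^T$ bzw. $A^{-1} = A^*$ lassen sich für konkrete Matrizen leicht
nachrechnen, etwa auch numerisch. Eine mögliche Skizze in Python mit NumPy (Bibliothek und Bezeichner sind
hier frei gewählt, nur zur Illustration):
\begin{verbatim}
import numpy as np

# Orthogonale Matrix aus dem Beispiel: A^{-1} = A^T
A = np.array([[2, 1, 2],
              [-2, 2, 1],
              [1, 2, -2]]) / 3
print(np.allclose(A.T @ A, np.eye(3)))        # True

# Unitaere Matrix aus dem Beispiel: U^{-1} = U^* = conj(U)^T
U = np.array([[1, 1j],
              [-1j, -1]]) / np.sqrt(2)
print(np.allclose(U.conj().T @ U, np.eye(2))) # True
\end{verbatim}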
|
|
|
|
|
2022-05-18 15:02:32 +02:00
|
|
|
\begin{satz}
|
2022-05-19 09:42:55 +02:00
|
|
|
Es sind äquivalent für $A \in \K^{\nxn}, \K \in \{\R, \C\}$:
|
2022-05-18 15:02:32 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-05-19 09:42:55 +02:00
|
|
|
\item $A$ ist orthogonal/unitär.
|
2022-05-18 15:02:32 +02:00
|
|
|
\item $(a_{1\_}, a_{2\_}, \dots, a_{n\_})$ bilden ONS in $\K^n$.
|
|
|
|
\item $(a_{\_1}, a_{\_2}, \dots, a_{\_n})$ bilden ONS in $\K^n$.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-10 00:29:02 +02:00
|
|
|
\leavevmode
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{itemize}
|
|
|
|
\item[a) $\iff$ b):] b) heißt, dass $\inner{a_{i\_}}{a_{j\_}}_{\K^n} = \delta_{ij}$.\\
|
2023-01-31 13:30:38 +01:00
|
|
|
Gleichzeitig gilt $(\inner{a_{i\_}}{a_{j\_}}_{\K^n})_{i,j=1,\dots,n} = A A^*$
|
|
|
|
\\ Also ist b) gleichbedeutend mit $A A^* = I$, also $A^{-1} = A^*$.
|
2022-06-08 23:25:28 +02:00
|
|
|
\item[a) $\iff$ c):] genauso, nur mit $A^* A$
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-05-18 15:02:32 +02:00
|
|
|
|
2022-05-19 09:42:55 +02:00
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $V$ ein euklidischer/unitärer Vektorraum mit $\dim(V)<\infty$ und
|
|
|
|
$\alpha\in\Hom(V, V)$. Dann gilt
|
2022-05-19 09:42:55 +02:00
|
|
|
\[
|
|
|
|
\alpha\text{ ist orthogonal/unitär} \iff \alpha^{-1}=\alpha^*
|
|
|
|
\]
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{itemize}
|
|
|
|
\item[$\implies$:] Seien $v,w \in V$.
|
|
|
|
$\alpha^{-1}$ existiert wegen Korollar~\ref{theo:3.3.3}b)
|
|
|
|
\begin{align*}
|
2022-10-05 18:46:19 +02:00
|
|
|
\inner{v}{\alpha^*(w) - \alpha^{-1}(w)} & = \inner{v}{\alpha^*(w)} - \inner{v}{\alpha^{-1}(w)} \\
|
|
|
|
& = \inner{\alpha(v)}{w} - \inner{v}{\alpha^{-1}(w)} \\
|
|
|
|
& = \inner{\alpha(v)}{w} - \inner{\alpha(v)}{\alpha(\alpha^{-1}(w))} \\
|
|
|
|
& = \inner{\alpha(v)}{w} - \inner{\alpha(v)}{w} = 0
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
Da dies für alle $v \in V$ gilt, folgt $\alpha^*(w) = \alpha^{-1}(w)$ für alle $w \in V$, also $\alpha^* = \alpha^{-1}$.
|
|
|
|
\item[$\impliedby$:] Sei $\alpha^* = \alpha^{-1}, u,v,w\in V, v = \alpha(w)$
|
|
|
|
\[
|
|
|
|
\implies \inner uw = \inner{u}{\alpha^{-1}(v)} = \inner{u}{\alpha^*(v)} = \inner{\alpha(u)}{v}
|
|
|
|
=\inner{\alpha(u)}{\alpha(w)} \checkmark
|
|
|
|
\]
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-05-19 09:42:55 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $B$ Orthonormalbasis, $\alpha \in \Hom(V, V), A= {}_B M(\alpha)_B$. Dann
|
|
|
|
gilt:
|
2022-05-19 09:42:55 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\alpha$ orthogonal $\iff A$ orthogonal.
|
|
|
|
\item $\alpha$ unitär $\iff A$ unitär.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Satz \ref{theo:3.2.4}: ${}_B M(\alpha^*)_B = A^*$ \\
|
|
|
|
${}_B M(\alpha^{-1})_B = A^{-1}$
|
|
|
|
\end{proof}
|
2022-05-19 09:42:55 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $O(n, \R)$ ist Untergruppe von $\GL(n, \R)$.
|
|
|
|
\item $U(n, \C)$ ist Untergruppe von $\GL(n, \C)$.
|
|
|
|
\item $A \in O(n, \R) \implies \det(A) \in \{1, -1\}$.
|
2022-06-13 10:53:40 +02:00
|
|
|
\item $A \in U(n, \C) \implies \abs{ \det(A) } = 1
|
2022-05-19 09:42:55 +02:00
|
|
|
\implies \det(A) = e^{i\alpha}, \alpha \in [ 0, 2\pi ]$.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $I \in O(n, \R), A,B \in O(n, \R)$ \\
|
2022-06-30 13:04:01 +02:00
|
|
|
$(AB)^* = B^*A^* = B^{-1}A^{-1} = (AB)^{-1} \implies AB \in O(n, \R)$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
$(A^{-1})^{{}^*} = (A^*)^{{}^*} = A \implies A^{-1} \in O(n, \R)$
|
|
|
|
\item Genauso
|
|
|
|
\item $A^{-1} = A^T$.
|
|
|
|
\begin{align*}
|
|
|
|
1 & = \det(A A^{-1}) = \det(A)\det(A^{-1}) = \det(A) \det(A^T) = \det(A) \det(A) \\
|
|
|
|
& = \det(A)^2 \implies \det(A) \in \{-1, 1\}
|
|
|
|
\end{align*}
|
|
|
|
\item $\det(\overline A) = \overline{\det(A)} \implies \det(A^*) = \overline{\det(A)}$.
|
|
|
|
\begin{align*}
|
|
|
|
& 1 = \det(A) \det(A^{-1})=\det(A) \det(A^*) = \det(A) \overline{\det(A)} \\
|
2022-06-13 10:53:40 +02:00
|
|
|
& \implies \abs{ \det(A) }=1
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
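\subsubsection{Beispiel}
Die Drehmatrix $\left(\begin{smallmatrix} \cos\varphi & -\sin\varphi \\ \sin\varphi & \cos\varphi \end{smallmatrix}\right) \in O(2, \R)$
hat Determinante $\cos^2\varphi + \sin^2\varphi = 1$, die Spiegelung
$\left(\begin{smallmatrix} 1 & 0 \\ 0 & -1 \end{smallmatrix}\right) \in O(2, \R)$ hat Determinante $-1$;
beide nach c) möglichen Werte treten also auf.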
|
2022-05-19 09:42:55 +02:00
|
|
|
|
2022-05-25 14:31:20 +02:00
|
|
|
\subsubsection{Polarzerlegung}
|
2022-06-13 10:53:40 +02:00
|
|
|
$z = \overbrace{e^{i\varphi}}^{\text{Betrag 1}} \underbrace{\abs{ z }}_{\mathclap{\text{positiv reell}}}$\\
|
2022-05-25 14:31:20 +02:00
|
|
|
Betrag 1 $\cong$ unitär, positiv $\cong$ selbstadjungiert mit positiven Eigenwerten.
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Polarzerlegung]
|
2022-05-25 14:31:20 +02:00
|
|
|
\label{theo:3.3.9}
|
|
|
|
Sei $V$ ein euklidischer/unitärer Vektorraum, $\dim(V)<\infty$ und sei $\alpha \in \Hom(V,V)$. Dann existiert
|
|
|
|
eine orthogonale/unitäre Abbildung $\beta$ und eine selbstadjungierte Abbildung $\gamma$ mit lauter
|
|
|
|
nichtnegativen reellen Eigenwerten, sodass $\alpha = \beta \circ \gamma$. Falls $\alpha$ Automorphismus ist,
|
|
|
|
so sind alle Eigenwerte von $\gamma$ positiv, und $\gamma, \beta$ eindeutig bestimmt.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{satz}
|
|
|
|
\begin{proof}
|
|
|
|
Zunächst: $\alpha$ Automorphismus.
|
|
|
|
\begin{itemize}
|
|
|
|
\item $\alpha^*$ ist auch Automorphismus.
|
|
|
|
\begin{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
\text{Sei }\alpha^*(v) = 0 & \implies \forall w \in V\colon
|
2022-06-08 23:25:28 +02:00
|
|
|
\inner{w}{\alpha^*(v)} = 0 \\
|
2023-03-28 11:46:57 +02:00
|
|
|
& \implies \forall w \in V\colon \inner{\alpha(w)}{v} = 0 \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& \implies v \in \im(\alpha)^{\bot} = V^{\bot} = \{0\} \\
|
|
|
|
& \implies v = 0
|
|
|
|
\end{align*}
|
|
|
|
\item $\alpha^*\circ \alpha$ ist Automorphismus und selbstadjungiert.
|
|
|
|
\[
|
|
|
|
(\alpha^*\alpha)^{{}^*} = \alpha^*(\alpha^*)^{{}^*} = \alpha^* \alpha
|
|
|
|
\]
|
|
|
|
\item Nach Satz \ref{theo:3.2.11} ist $\spec(\alpha^*\alpha) \subseteq \R\setminus\{0\}$.
|
2023-01-31 13:30:38 +01:00
|
|
|
$\exists$ ONB $(e_1, \dots, e_n)$ von V aus Eigenvektoren, $(\lambda_1, \dots,
|
|
|
|
\lambda_n) \subseteq \R\setminus\{0\}$ Eigenwerte. Behauptung: $\lambda_i > 0
|
|
|
|
\forall i \in [n]$
|
2022-06-08 23:25:28 +02:00
|
|
|
\[
|
|
|
|
\lambda_i = \lambda_i \inner{e_i}{e_i} = \inner{\lambda_ie_i}{e_i} =
|
|
|
|
\inner{\alpha^*\alpha(e_i)}{e_i} = \inner{\alpha(e_i)}{\alpha(e_i)} > 0
|
|
|
|
\]
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\gamma\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
V & \to V \\
|
|
|
|
v = \sum_{i=1}^n \mu_i e_i & \mapsto \sum_{i=1}^n \mu_i \sqrt{\lambda_i} e_i
|
|
|
|
\end{cases}
|
|
|
|
$
|
|
|
|
${}_B M(\alpha^* \alpha)_B =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \lambda_n
|
|
|
|
\end{pmatrix}
|
|
|
|
$
|
|
|
|
${}_B M(\gamma)_B =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\sqrt{\lambda_1} \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \sqrt{\lambda_n}
|
|
|
|
\end{pmatrix}
|
|
|
|
$
|
2022-06-08 23:25:28 +02:00
|
|
|
$\implies \gamma$ selbstadjungiert \\
|
|
|
|
$\implies \gamma^2 = \alpha^* \alpha$
|
|
|
|
\item $\alpha = \beta \circ \gamma \implies \beta = \alpha \circ \gamma^{-1}$ Behauptung: $\beta$ unitär, das
|
|
|
|
heißt $\beta^{-1} = \beta^*$
|
|
|
|
\begin{align*}
|
|
|
|
\beta^{-1} & = (\alpha \gamma^{-1})^{{}^{-1}} = \gamma \alpha^{-1} = \gamma^{-1} \gamma^2 \alpha^{-1}
|
|
|
|
= \gamma^{-1} \alpha^* \alpha \alpha^{-1} = \gamma^{-1} \alpha^* \\
|
|
|
|
& = (\alpha(\gamma^{-1})^*)^{{}^*} = (\alpha \gamma^{-1})^{{}^*} = \beta^*
|
|
|
|
\end{align*}
|
|
|
|
$\implies \beta$ unitär.
|
|
|
|
\end{itemize}
|
|
|
|
\underline{Eindeutigkeit:} $\alpha = \beta' \circ \gamma'$
|
|
|
|
\[
|
|
|
|
\gamma^2 = \alpha^* \alpha = (\gamma')^* \underbrace{\beta'^* \beta'}_{\id}\gamma' = (\gamma')^* \gamma'
|
|
|
|
= (\gamma')^2
|
|
|
|
\]
|
|
|
|
$\implies \gamma, \gamma'$ haben dieselben Eigenwerte und Eigenvektoren. \\
|
|
|
|
$\implies \gamma = \gamma' \implies \beta = \beta'$ \\
|
|
|
|
\underline{$\alpha$ nicht injektiv}:
|
|
|
|
\begin{itemize}
|
|
|
|
\item $W := \ker(\alpha)^\bot \implies \alpha|_W$ ist injektiv. \\
|
|
|
|
Sei $v, w \in W, \alpha(v) = \alpha(w) \implies \alpha(v - w) = 0 \implies v - w \in \ker(\alpha)
|
|
|
|
= W^\bot \cap W = \{0\} \implies v = w$
|
|
|
|
$\implies \alpha|_W = \beta_W \circ \gamma_W$ mit $\beta_W, \gamma_W \in \Hom(W, W); \beta_W$ unitär,
|
|
|
|
$\gamma_W$ selbstadjungiert mit positiven Eigenwerten.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Sei $( e_1, \dots, e_k )$ ONB von $W$, $(e_1, \dots, e_k, \dots, e_n)$ ONB von
|
|
|
|
$V$.
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\pi\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
V & \to W \\
|
|
|
|
v = \sum_{i=1}^n \lambda_i e_i & \mapsto \sum_{i=1}^k \lambda_i e_i
|
|
|
|
\end{cases}
|
|
|
|
$
|
2022-06-08 23:25:28 +02:00
|
|
|
orthogonale Projektion auf $W$. \\
|
|
|
|
$\pi$ ist selbstadjungiert:
|
|
|
|
\begin{align*}
|
|
|
|
& \inner{\pi(v)}w = \inner{\sum_{i=1}^k \lambda_i e_i}{\sum_{j=1}^n \mu_j e_j} = \sum_{i=1}^k \lambda_i
|
|
|
|
\overline{\mu_i} \\
|
|
|
|
& \inner{v}{\pi(w)} = \inner{\sum_{i=1}^n \lambda_i e_i}{\sum_{j=1}^k \mu_j e_j} = \sum_{i=1}^k \lambda_i
|
|
|
|
\overline{\mu_i}
|
|
|
|
\end{align*}
|
|
|
|
$\gamma := \pi^* \circ \gamma_W \circ \pi = \pi \circ \gamma_W \circ \pi$
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
& v\in W & & \implies \gamma(v) = \gamma_W(v) \\
|
|
|
|
% Hier fehlt noch was aus der VO, nachschauen
|
2022-06-08 23:25:28 +02:00
|
|
|
& v \in W^\bot = \ker(\alpha) & & \implies \gamma(v) = 0
|
|
|
|
\end{align*}
|
|
|
|
$\beta := \underset{W}{\beta_W} \oplus \underset{W^\bot}{I}$ ist orthogonal/unitär. \\
|
|
|
|
$\implies \alpha = \beta \circ \gamma$
|
|
|
|
\end{itemize}
|
|
|
|
\end{proof}
|
2022-05-25 14:31:20 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
$\alpha \in \Hom(V, V)$ heißt
|
|
|
|
$\genfrac{}{}{0pt}{0}{\text{\underline{positiv definit}}}{\text{\underline{positiv semi-definit}}}$, wenn
|
2023-03-28 11:46:57 +02:00
|
|
|
$\forall v \in V\setminus \{0\}\colon \inner{\alpha(v)}{v}\genfrac{}{}{0pt}{0}{>}{\ge}0$
|
2022-05-25 14:31:20 +02:00
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-25 14:31:20 +02:00
|
|
|
Sei $\alpha$ selbstadjungiert. Dann gilt
|
|
|
|
\begin{align*}
|
|
|
|
\alpha \text{ positiv definit} & \iff \text{ Alle Eigenwerte positiv} \\
|
|
|
|
\alpha \text{ positiv semi-definit} & \iff \text{ Alle Eigenwerte nicht-negativ}
|
|
|
|
\end{align*}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $(e_1, \dots, e_n)$ ONB aus Eigenvektoren,
|
|
|
|
$(\lambda_1, \dots, \lambda_n), \lambda_i \in \R$ Eigenwerte, $v = \sum_{i=1}^n
|
|
|
|
\mu_i e_i$
|
|
|
|
\begin{align*}
|
|
|
|
\inner{\alpha(v)}v = \inner{\sum_{i=1}^n \lambda_i \mu_i e_i}{\sum_{j=1}^n \mu_j e_j} =
|
2022-06-13 10:53:40 +02:00
|
|
|
\sum_{i=1}^n \lambda_i \mu_i \overline{\mu_i} = \sum_{i=1}^n \lambda_i \underbrace{\abs{ \mu_i}}_{\ge 0}{}^2
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
Angenommen $\exists j \in [n]\colon\lambda_j \le 0 \implies \inner{\alpha(e_j)}{e_j} = \lambda_j \le 0
|
2022-06-08 23:25:28 +02:00
|
|
|
\implies \alpha$ nicht positiv definit. \\
|
2023-03-28 11:46:57 +02:00
|
|
|
Angenommen $\forall i \in [n]\colon \lambda_i > 0 \implies \inner{\alpha(v)}v = \sum_{i=1}^n \lambda_i
|
2022-06-13 10:53:40 +02:00
|
|
|
\abs{ \mu_i }^2 > 0$.
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
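\subsubsection{Beispiel}
Die durch $v \mapsto \left(\begin{smallmatrix} 2 & 1 \\ 1 & 2 \end{smallmatrix}\right) v$ auf dem $\R^2$ mit
Standardskalarprodukt definierte selbstadjungierte Abbildung hat die Eigenwerte $1$ und $3$ und ist damit
positiv definit. Für $\left(\begin{smallmatrix} 1 & 1 \\ 1 & -5 \end{smallmatrix}\right)$ sind die Eigenwerte
$-2 \pm \sqrt{10}$; wegen $-2 - \sqrt{10} < 0$ ist die zugehörige Abbildung weder positiv definit noch
positiv semi-definit.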
|
2022-05-25 14:31:20 +02:00
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
2023-01-31 13:30:38 +01:00
|
|
|
In der Polarzerlegung ist $\beta$ orthogonal/unitär und $\gamma$
|
|
|
|
selbstadjungiert \& positiv (semi-)definit.
|
2022-05-25 14:31:20 +02:00
|
|
|
|
|
|
|
\section[Hauptachsentheorem für symmetrische/hermitesche Matrizen]{Hauptachsentheorem für \\symmetrische/hermitesche Matrizen}
|
|
|
|
|
|
|
|
\subsubsection{Ziel}
|
|
|
|
Klassifizierung aller Skalarprodukte.
|
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{align*}
|
|
|
|
A \in & \R^{\nxn} \text{ heißt} & & \text{\underline{symmetrisch}, wenn} & & A = A^T \\
A \in & \C^{\nxn} \text{ heißt} & & \text{\underline{hermitesch}, wenn} & & A = A^* \\
A \in & \R^{\nxn} \text{ heißt} & & \text{\underline{schiefsymmetrisch}, wenn} & & A = -A^T \\
A \in & \C^{\nxn} \text{ heißt} & & \text{\underline{schiefhermitesch}, wenn} & & A = -A^*
|
2022-05-25 14:31:20 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-25 14:31:20 +02:00
|
|
|
$V$ euklidischer/unitärer Vektorraum, $\dim(V)< \infty$. Dann gilt:
|
|
|
|
\begin{itemize}
|
|
|
|
\item $\alpha$ selbstadjungiert $\iff \exists$ ONB $B$ mit ${}_B M(\alpha)_B$ \\
|
|
|
|
symmetrisch/hermitesch.
|
|
|
|
\item $\alpha$ anti-selbstadjungiert $\iff \exists$ ONB $B$ mit ${}_B M(\alpha)_B$ \\
|
|
|
|
schiefsymmetrisch/schiefhermitesch.
|
|
|
|
\end{itemize}
|
|
|
|
\end{satz}
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-01 10:02:40 +02:00
|
|
|
\label{theo:3.4.3}
|
2022-05-25 14:31:20 +02:00
|
|
|
$A$ symmetrisch/hermitesch. \\
|
|
|
|
$\implies \exists$ orthogonale/unitäre Matrix $P$ mit $D = P^{-1} A P$ reelle Diagonalmatrix
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Sei $E = \{e_1, \dots, e_n\} \subseteq \K^n$ kanonische Basis, \\
|
2023-03-28 11:46:57 +02:00
|
|
|
$\varphi_A\colon
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{cases}
|
|
|
|
\K^n & \to \K^n \\
|
|
|
|
v & \mapsto Av
|
|
|
|
\end{cases}
|
2022-06-08 23:25:28 +02:00
|
|
|
\implies {}_E M(\varphi_A)_E = A \implies {}_E M(\varphi_A^*)_E = A^* = A$ \\
|
|
|
|
$\implies \varphi_A$ selbstadjungiert.
|
|
|
|
$\implies \exists$ ONB $B=(b_1, \dots, b_n)$ von $\K^n$ aus Eigenvektoren von $\varphi_A$, Eigenwerte sind
|
|
|
|
reell.
|
|
|
|
$\implies {}_B M(\varphi_A)_B = D = \diag(\lambda_1, \dots, \lambda_n)$ \\
|
|
|
|
$\implies {}_B M(\varphi_A)_B = \underbrace{{}_B M(\id)_E}_{P^{-1}}
|
|
|
|
\underbrace{{}_E M(\varphi_A)_E}_{A} \underbrace{{}_E M(\id)_B}_{\underbrace{(b_1, \dots, b_n)}_{P}}
|
|
|
|
\implies D = P^{-1} A P$
|
|
|
|
\end{proof}
|
2022-05-25 14:31:20 +02:00
|
|
|
|
|
|
|
\begin{korollar}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2022-05-25 14:31:20 +02:00
|
|
|
$A$ symmetrisch/hermitesch $\implies$ Eigenvektoren zu verschiedenen Eigenwerten sind orthogonal.
|
|
|
|
\end{korollar}
|
|
|
|
|
|
|
|
\subsubsection{Berechnung der Hauptachsentransformation}
|
|
|
|
$A \in \K^{\nxn}$
|
|
|
|
\begin{enumerate}[label=\arabic*)]
|
|
|
|
\item $\chi_A(\lambda) = \prod_{j=1}^r (\lambda_j - \lambda)^{d_j} $ ($\sum d_j = n$; algebraische Vielfachheit = geometrische Vielfachheit)
|
2023-03-28 11:46:57 +02:00
|
|
|
\item Für jedes $j \in [r]\colon$ Berechne Basis $B_j = (b_1^j, \dots, b_{d_j}^j)$
|
|
|
|
von $\ker(A - \lambda_j I)$
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Orthogonalisiere $B_j$ zu ONS $E_j = (e_1^j, \dots, e_{d_j}^j)$ mittels
|
|
|
|
Gram-Schmidt Verfahren.
|
2022-05-25 14:31:20 +02:00
|
|
|
\item $B = \bigcup_{j=1}^r E_j$ ist die gesuchte Orthonormalbasis.\\
|
2023-01-31 13:30:38 +01:00
|
|
|
Insbesondere $B^{-1} A B =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \lambda_n
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
2022-05-25 14:31:20 +02:00
|
|
|
$B^{-1} = \overline B^T$
|
|
|
|
\end{enumerate}
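Numerisch übernimmt eine Eigenwertroutine die Schritte 1) bis 4) auf einmal; eine mögliche Skizze in Python
mit NumPy (Bibliothek und Bezeichner sind frei gewählt):
\begin{verbatim}
import numpy as np

# symmetrische Beispielmatrix
A = np.array([[1.0, 1.0],
              [1.0, -5.0]])

# eigh liefert reelle Eigenwerte und eine Orthonormalbasis aus Eigenvektoren
lam, P = np.linalg.eigh(A)               # Spalten von P: gesuchte ONB

print(np.allclose(P.T, np.linalg.inv(P)))      # P ist orthogonal
print(np.allclose(P.T @ A @ P, np.diag(lam)))  # P^{-1} A P ist Diagonalmatrix
\end{verbatim}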
|
|
|
|
Polarzerlegung $A \in \K^{\nxn}, A^* A$ symmetrisch/hermitesch
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
\implies A^* A = P^*
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \lambda_n
|
|
|
|
\end{pmatrix}
|
|
|
|
P
|
2022-05-25 14:31:20 +02:00
|
|
|
\]
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
(A^* A)^{\frac 12} = P^*
|
|
|
|
\begin{pmatrix}
|
|
|
|
\sqrt{\lambda_1} \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \sqrt{\lambda_n}
|
|
|
|
\end{pmatrix}
|
|
|
|
P
|
2022-05-25 14:31:20 +02:00
|
|
|
\underset{\mathclap{O=AS^{-1}}}{=} S \implies A = \underset{\mathclap{\substack{| \\ \text{orthogonal/unitär}}}}{O} S
|
|
|
|
\]
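Diese Rechnung lässt sich, zumindest für invertierbares $A$, ebenfalls numerisch skizzieren (wieder Python
mit NumPy, nur zur Illustration):
\begin{verbatim}
import numpy as np

A = np.array([[3.0, 1.0],
              [0.0, 2.0]])            # invertierbare Beispielmatrix

lam, P = np.linalg.eigh(A.T @ A)      # A^* A = P diag(lam) P^T, lam > 0
S = P @ np.diag(np.sqrt(lam)) @ P.T   # S = (A^* A)^{1/2}, selbstadjungiert
O = A @ np.linalg.inv(S)              # O = A S^{-1}

print(np.allclose(O.T @ O, np.eye(2)))   # O ist orthogonal
print(np.allclose(O @ S, A))             # Polarzerlegung A = O S
\end{verbatim}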
|
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-01 10:02:40 +02:00
|
|
|
Sei $V$ ein reeller/komplexer Vektorraum mit $\dim(V) = n, B=(b_1, \dots, b_n)$ Basis. Für $A \in \K^{\nxn}$ ist
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.4.5.1}
|
|
|
|
\inner vw := {}_B \Phi(v)^T A {}_B \overline{\Phi(w)}
|
|
|
|
\end{equation}
|
|
|
|
genau dann ein Skalarprodukt, wenn $A$ symmetrisch/hermitesch und\\
|
2023-03-28 11:46:57 +02:00
|
|
|
$\underbrace{\text{positiv definit}}_{\substack{\forall \lambda \in \spec(A)\colon \lambda > 0 \\
\rotatebox{90}{\tiny$\iff$} \\ \forall x \in \K^n\setminus\{0\}\colon \inner{Ax}{x} > 0}}$ ist. \\
|
2022-06-01 10:02:40 +02:00
|
|
|
Umgekehrt: Sei $\inner ..$ Skalarprodukt und $B$ Basis
|
|
|
|
\[
|
|
|
|
\implies \inner vw = {}_B \Phi(v)^T \underbrace{(\inner{b_i}{b_j})_{i,j=1}^n}_A {}_B \overline{\Phi(w)}
|
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
($A = P^* \left(
|
|
|
|
\begin{smallmatrix}
|
|
|
|
\lambda_1 \\ & \ddots \\ & & \lambda_n
|
|
|
|
\end{smallmatrix}
|
|
|
|
\right)
|
2022-05-25 14:31:20 +02:00
|
|
|
P \implies \inner vw = {}_B \Phi(v)^T P^*
|
2023-01-31 13:30:38 +01:00
|
|
|
\left(
|
|
|
|
\begin{smallmatrix}
|
|
|
|
\lambda_1 \\ & \ddots \\ & & \lambda_n
|
|
|
|
\end{smallmatrix}
|
|
|
|
\right)
|
2022-05-25 14:31:20 +02:00
|
|
|
P {}_B \overline{\Phi(w)}$
|
|
|
|
$\implies$ in geeigneter Basis ist jedes Skalarprodukt durch $\sum_{i=1}^n \mu_i \overline \lambda_i$ gegeben.)
|
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Nur für $\K=\C$. Angenommen \ref{eq:3.4.5.1} ist Skalarprodukt.
|
|
|
|
\begin{align*}
|
|
|
|
\implies & \underset{\rotatebox{90}{$=$}}{\inner{b_i}{b_j}} = {}_B \Phi(b_i)^T A {}_B \overline{\Phi(b_j)}
|
|
|
|
= e_i^T A e_j = a_{ij} \\
|
|
|
|
& \overline{\inner{b_j}{b_i}} = \overline{a_{ji}} \\
|
|
|
|
\implies & a_{ij} = \overline{a_{ji}} \implies A \text{ hermitesch}
|
|
|
|
\end{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
Weiters muss $A$ positiv definit sein: Angenommen $\exists x \in \C^n \setminus \{0\}\colon x^T A \overline x \le 0
\implies v := \sum x_i b_i \neq 0$, das heißt ${}_B \Phi(v) = x$ erfüllt $\inner vv = {}_B \Phi(v)^T A {}_B
\overline{\Phi(v)} = x^T A \overline x \le 0$, Widerspruch. \\
|
|
|
|
Sei $A$ hermitesch \& positiv definit. Klarerweise gilt dann für \\
|
|
|
|
$\inner uv := {}_B \Phi(u)^T A {}_B \overline{\Phi(v)}$:
|
|
|
|
\begin{align*}
|
|
|
|
\inner{u+v}{w} & = \inner uw + \inner vw \\
|
|
|
|
\inner uv & = \overline{\inner vu} \\
|
|
|
|
\inner{\lambda u}{v} & = \lambda \inner uv
|
|
|
|
\end{align*}
|
2023-03-28 11:46:57 +02:00
|
|
|
Bleibt zu zeigen, dass $\forall v \in V\setminus\{0\}\colon \inner vv > 0$ \\
|
2022-06-08 23:25:28 +02:00
|
|
|
Satz \ref{theo:3.4.3} $\implies \exists$ unitäre Matrix $U$ mit
|
|
|
|
$A = \underset{\substack{\rotatebox{90}{$=$}\\U^{-1}}}{U^*}
|
|
|
|
\underbrace{\diag(\lambda_1, \dots, \lambda_n)}_\Sigma U, \lambda_i \in (0, \infty)\forall i \in [n]$
|
|
|
|
\begin{align*}
|
|
|
|
& \inner vv={}_B \Phi(v)^T A {}_B \overline{\Phi(v)} = {}_B \Phi(v)^T U^* \Sigma U {}_B \overline{\Phi(v)}
|
2023-01-31 13:30:38 +01:00
|
|
|
= \underset{\substack{\rotatebox{90}{$=$} \\
|
|
|
|
\sum \lambda_i \abs{ x_i }^2 > 0}}
|
2022-06-08 23:25:28 +02:00
|
|
|
{(\overline U {}_B\Phi(v))^T \Sigma \overline{\overline U {}_B \Phi(v)}} \\
|
|
|
|
& v \neq0 \implies \overline U {}_B \Phi(v) =
|
2023-01-31 13:30:38 +01:00
|
|
|
\left(
|
|
|
|
\begin{smallmatrix}
|
|
|
|
x_1 \\ \vdots \\ x_n
|
|
|
|
\end{smallmatrix}
|
|
|
|
\right) \neq 0
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\end{proof}
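\subsubsection{Beispiel}
$A = \left(\begin{smallmatrix} 2 & 1 \\ 1 & 2 \end{smallmatrix}\right)$ ist symmetrisch mit Eigenwerten
$1, 3 > 0$; nach dem Satz definiert \ref{eq:3.4.5.1} bezüglich der Standardbasis also ein Skalarprodukt auf
$\R^2$, nämlich $\inner xy = 2x_1y_1 + x_1y_2 + x_2y_1 + 2x_2y_2$. Für
$A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & -5 \end{smallmatrix}\right)$ erhält man dagegen kein
Skalarprodukt, da der Eigenwert $-2 - \sqrt{10}$ negativ ist.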
|
2022-05-18 15:02:32 +02:00
|
|
|
|
2022-06-07 19:16:17 +02:00
|
|
|
\begin{defin}
|
|
|
|
Sei $A \in \K^{\nxn}$ eine symmetrische/hermitesche Matrix.
|
|
|
|
\begin{itemize}
|
|
|
|
\item \[
|
2023-03-28 11:46:57 +02:00
|
|
|
t(A) := \abs{ \{\lambda \in \spec(A)\colon \lambda > 0 \} }
|
2022-06-07 19:16:17 +02:00
|
|
|
\]
|
|
|
|
heißt \underline{Trägheitsindex} von $A$ (Eigenwerte mit Vielfachheit gezählt).
|
|
|
|
\item $A, B$ heißen \underline{kongruent}, wenn eine invertierbare Matrix $Q \in \K^{\nxn}$ existiert mit
|
|
|
|
$B = Q^* A Q$
|
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
$M_B(\sigma), M_{B'}(\sigma)$ sind kongruent (\& umgekehrt)
|
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Trägheitssatz von Sylvester]
|
2022-06-07 19:16:17 +02:00
|
|
|
Sei $A \in \K^{\nxn}$ symmetrisch/hermitesch mit $\rg(A) = r, t(A)=t$. Dann gilt:
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Es gibt $S \in \K^{\nxn}$ invertierbar mit
|
|
|
|
\[
|
|
|
|
S^* A S =
|
|
|
|
\diag(\underbrace{1, \dots, 1}_t, \underbrace{-1, \dots, -1}_{r-t}, \underbrace{0, \dots, 0}_{n-r})
|
|
|
|
\]
|
|
|
|
$A$ ist zu dieser Matrix kongruent.
|
|
|
|
\item $A, B$ kongruent $\iff t(A) = t(B) \land \rg(A) = \rg(B)$
|
|
|
|
\end{enumerate}
|
|
|
|
\subsubsection{Bemerkung}
|
2023-01-31 13:30:38 +01:00
|
|
|
Trägheitsindex und Rang charakterisieren symmetrische Bilinearformen /
|
|
|
|
hermitesche Sesquilinearformen komplett.
|
2022-06-07 19:16:17 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Nur $\K = \C$
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Satz \ref{theo:3.4.3} $\implies \exists P$ unitär mit $P^* AP =
|
|
|
|
\diag(\lambda_1, \dots, \lambda_n)$ o.B.d.A. $\lambda_1, \dots, \lambda_t > 0,
|
|
|
|
\lambda_{t+1}, \dots, \lambda_{r} < 0, \lambda_{r+1}, \dots, \lambda_n = 0$. \\
|
|
|
|
Setze $T =
|
|
|
|
\begin{pmatrix}
|
2022-06-13 10:53:40 +02:00
|
|
|
\frac{1}{\sqrt{\abs{ \lambda_1 }}} \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \frac{1}{\sqrt{\abs{ \lambda_r }}} \\
|
|
|
|
& & & 1 \\
|
|
|
|
& & & & \ddots \\
|
|
|
|
& & & & & 1
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
$
|
2022-06-08 23:25:28 +02:00
|
|
|
$\implies S := PT$ ist invertierbar.
|
|
|
|
\begin{align*}
|
|
|
|
S^* A S & = T \underbrace{P^* A P}_{\mathclap{\diag{\lambda_1, \dots, \lambda_n}}} T = T
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \lambda_n
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
T \\
|
|
|
|
& =
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & 1 \\
|
|
|
|
& & & -1 \\
|
|
|
|
& & & & \ddots \\
|
|
|
|
& & & & & -1 \\
|
|
|
|
& & & & & & 0 \\
|
|
|
|
& & & & & & & \ddots \\
|
|
|
|
& & & & & & & & 0
|
|
|
|
\end{pmatrix}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
\item $A, B$ kongruent, das heißt $\exists Q$ invertierbar mit $B = Q^* A Q$\\
|
|
|
|
$ \implies \rg(B) = \rg(A)$.
|
|
|
|
Satz \ref{theo:3.4.3} $\implies \exists P_1, P_2$ unitär mit
|
|
|
|
\begin{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
P_1^* A P_1 & =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\lambda_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \lambda_n
|
|
|
|
\end{pmatrix}
|
|
|
|
= D \\
|
|
|
|
P_2^* B P_2 & =
|
|
|
|
\begin{pmatrix}
|
|
|
|
\mu_1 \\
|
|
|
|
& \ddots \\
|
|
|
|
& & \mu_n
|
|
|
|
\end{pmatrix}
|
|
|
|
= G
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
$t(A) := t, t(B) := s$ \\
|
|
|
|
Ordne so, dass $\ontop{\lambda_1, \dots, \lambda_t > 0,
|
|
|
|
\lambda_{t+1}, \dots, \lambda_r < 0,
|
|
|
|
\lambda_{r+1}, \dots, \lambda_n = 0}
|
2022-06-09 11:00:51 +02:00
|
|
|
{\mu_1, \dots, \mu_s > 0, \mu_{s+1}, \dots, \mu_r < 0, \mu_{r+1}, \dots, \mu_n =0}$ \\
|
2022-06-13 10:53:40 +02:00
|
|
|
Setze $a_i := \sqrt{\abs{\lambda_i}}, b_i := \sqrt{\abs{\mu_i}}$
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.4.6.1}
|
2022-06-13 10:53:40 +02:00
|
|
|
x^* D x = \sum_{j=1}^t a_j^2 \abs{ x_j }^2 - \sum_{j=t+1}^r a_j^2 \abs{x_j}^2
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{equation}
|
|
|
|
$C := P_2^* Q^{-1} P_1, y := Cx$
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.4.6.2}
|
2022-06-13 10:53:40 +02:00
|
|
|
x^* D x = x^* C^* G C x = y^* G y = \sum_{j=1}^s b_j^2 \abs{ y_j }^2 - \sum_{j=s+1}^r b_j^2
|
|
|
|
\abs{ y_j }^2
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{equation}
|
|
|
|
Angenommen $t<s$:
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.4.6.3}
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{rcases}
|
|
|
|
x_i = 0 & i = 1, \dots, t \\
|
|
|
|
y_i = \sum_{j=1}^n C_{ij} x_j = 0 & i = s+1, \dots, n
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{rcases}
|
|
|
|
\end{equation}
|
|
|
|
$\implies$ weniger als $n$ homogene lineare Gleichungen in $n$
|
2023-03-28 11:46:57 +02:00
|
|
|
Variablen $\implies \exists z \in \C^n \setminus \{0\}\colon$ \ref{eq:3.4.6.3} ist für $z$ erfüllt.
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
2022-06-13 10:53:40 +02:00
|
|
|
z^* D z & \overset{\text{\ref{eq:3.4.6.1}}}{=} - \sum_{j=t+1}^r a_j^2 \abs{ z_j }^2 \le 0 \\
|
2022-06-08 23:25:28 +02:00
|
|
|
z^* D z & \overset{\text{\ref{eq:3.4.6.2}}}{=} \sum_{j=1}^s b_j^2 \abs{y_j}^2 > 0 \quad (\text{da } y = Cz \neq 0 \text{ und } y_j = 0 \text{ für } j > s)
|
2022-06-13 10:53:40 +02:00
|
|
|
& & \text{\Lightning}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
|
|
|
$\implies s = t$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
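\subsubsection{Beispiel}
Für $A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & -5 \end{smallmatrix}\right)$ ist $\rg(A) = 2$ und $t(A) = 1$,
denn die Eigenwerte sind $-2 \pm \sqrt{10}$ und genau einer davon ist positiv. Nach a) ist $A$ also kongruent
zu $\diag(1, -1)$, und nach b) ist jede symmetrische Matrix $B \in \R^{2 \times 2}$ mit $\rg(B) = 2$ und
$t(B) = 1$ zu $A$ kongruent.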
|
2022-06-07 19:16:17 +02:00
|
|
|
|
|
|
|
\subsubsection{Berechnung des Trägheitsindex:}
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $A$ symmetrisch/hermitesch, $\det(A) \neq 0 \implies \chi_A(\lambda) = a_n
\lambda^n + \dots + \underset{\mathrlap{\rotatebox{325}{\scriptsize$\neq
0$}}}{a_0}$ mit $a_i \in \R$ ist ein Polynom mit lauter reellen Nullstellen.
|
2022-06-07 19:16:17 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
\begin{satz}[Vorzeichenregel von Descartes]
|
2022-06-07 19:16:17 +02:00
|
|
|
Sei $p(\lambda) = a_n \lambda^n + \dots + a_0$ ein Polynom mit $p(0) = a_0 \neq 0$, reellen Koeffizienten und lauter reellen Nullstellen. Dann gilt (Nullstellen mit Vielfachheit gezählt)
|
|
|
|
\[
|
2023-03-28 11:46:57 +02:00
|
|
|
\abs{ \{ \lambda\colon p(\lambda) = 0 \land \lambda > 0 \} }
= \abs{ \{ i \in \{0, \dots, n-1 \}\colon a_i a_{i+1} < 0 \} }
|
2022-06-07 19:16:17 +02:00
|
|
|
\]
|
|
|
|
\end{satz}
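\subsubsection{Beispiel}
Für $A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & -5 \end{smallmatrix}\right)$ ist
$\chi_A(\lambda) = \lambda^2 + 4\lambda - 6$, also $a_0 = -6$, $a_1 = 4$, $a_2 = 1$. Es gibt genau einen
Vorzeichenwechsel ($a_0 a_1 < 0$, $a_1 a_2 > 0$), also genau eine positive Nullstelle und damit $t(A) = 1$,
in Übereinstimmung mit den Eigenwerten $-2 \pm \sqrt{10}$.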
|
|
|
|
|
2022-06-01 10:02:40 +02:00
|
|
|
\section{Bilinearformen und Sesquilinearformen}
|
|
|
|
|
|
|
|
\begin{defin}
|
|
|
|
\begin{itemize}
|
2023-03-28 11:46:57 +02:00
|
|
|
\item $\sigma\colon V\times V \to \K$ mit $\forall u,v,w \in V, \lambda \in \K$:
|
2022-06-01 10:02:40 +02:00
|
|
|
\begin{align}
|
|
|
|
\sigma(v+w, u) & = \sigma(v, u) + \sigma(w, u) \nonumber \\
|
|
|
|
\sigma(v, w+u) & = \sigma(v, w) + \sigma(v, u) \nonumber \\
|
|
|
|
\sigma(\lambda v, w) & = \lambda \sigma(v, w) \nonumber \\
|
|
|
|
\sigma(v, \lambda w) & = \lambda \sigma(v, w) \label{eq:3.5.1.1}
|
|
|
|
\end{align}
|
|
|
|
heißt \underline{Bilinearform}.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Falls $\K = \C$ und anstelle von \ref{eq:3.5.1.1} gilt, dass $\sigma(v, \lambda
|
|
|
|
w) = \overline \lambda \sigma(v,w)$, so heißt $\sigma$
|
|
|
|
\underline{Sesquilinearform}.
|
|
|
|
\item Eine Bilinearform heißt \underline{symmetrisch}, wenn $\sigma(u, v) = \sigma(v,
|
|
|
|
u)$ und \\ \underline{alternierend}, wenn $\sigma(u, v) = -\sigma(v, u)$.
|
|
|
|
\item Eine Sesquilinearform heißt \underline{hermitesch}, wenn $\sigma(u, v) =
|
|
|
|
\overline{\sigma(v, u)}$
|
2022-06-01 10:02:40 +02:00
|
|
|
\end{itemize}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
|
|
|
\begin{itemize}
|
|
|
|
\item Euklidisches Skalarprodukt ist symmetrische Bilinearform.
|
2023-01-31 13:30:38 +01:00
|
|
|
\item Unitäres Skalarprodukt ist hermitesche Sesquilinearform.
\item
|
|
|
|
\begin{equation}
|
|
|
|
\label{eq:3.5.1.2}
|
|
|
|
\sigma(x, y) = x_1 y_1 + x_1 y_2 + x_2 y_1 - 5 x_2 y_2 =
|
|
|
|
\begin{pmatrix}
|
|
|
|
x_1 & x_2
|
|
|
|
\end{pmatrix}
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 \\
|
|
|
|
1 & -5
|
|
|
|
\end{pmatrix}
|
|
|
|
\begin{pmatrix}
|
|
|
|
y_1 \\
|
|
|
|
y_2
|
|
|
|
\end{pmatrix}
|
2022-06-01 10:02:40 +02:00
|
|
|
\end{equation}
|
|
|
|
$V=\R^2$
|
|
|
|
$ q(x) := \sigma(x, x) = x_1^2 + x_1 x_2 + x_2 x_1 - 5x_2^2, \R^2 \to \R$
|
|
|
|
\end{itemize}
|
|
|
|
Ist $B = (b_1, \dots, b_n)$ eine Basis, so setzen wir $M_B(\sigma) := (\sigma(b_i, b_j))_{i,j=1}^n$.
|
|
|
|
|
|
|
|
\begin{lemma}
|
|
|
|
\label{theo:3.5.2}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
2022-06-07 19:16:17 +02:00
|
|
|
\item Es gilt für $\sigma$ Bilinearform und $B$ Basis, dass
|
2022-06-07 19:25:40 +02:00
|
|
|
\[
|
|
|
|
\sigma(u, v) = {}_B \Phi(u)^T M_B(\sigma) {}_B \Phi(v) \; \forall u, v
|
|
|
|
\]
|
|
|
|
\item Es gilt für $\sigma$ Sesquilinearform, dass
|
2022-06-07 19:16:17 +02:00
|
|
|
\[
|
|
|
|
\sigma(u, v) = {}_B \Phi(u)^T M_B(\sigma) {}_B \overline{\Phi(v)} \; \forall u, v
|
|
|
|
\]
|
2022-06-07 19:25:40 +02:00
|
|
|
\item Sei $B'$ eine weitere Basis und $\K = \R$
|
|
|
|
\[
|
|
|
|
M_{B'}(\sigma) = {{}_B M(\id)_{B'}}^T \, M_B(\sigma) \, {}_B M(\id)_{B'}
|
|
|
|
\]
|
|
|
|
\item Sei $B'$ eine weitere Basis und $\K = \C$
|
2022-06-01 10:02:40 +02:00
|
|
|
\[
|
2022-06-02 11:57:46 +02:00
|
|
|
M_{B'}(\sigma) = {{}_B M(\id)_{B'}}^T \, M_B(\sigma) \, {}_B \overline{M(\id)}_{B'}
|
2022-06-01 10:02:40 +02:00
|
|
|
\]
|
|
|
|
\item $\sigma$ symmetrisch/hermitesch $\iff M_B(\sigma)^* = M_B(\sigma)$
|
|
|
|
\end{enumerate}
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item Analog wie b)
|
|
|
|
\item $u = \sum \lambda_i b_i, v=\sum \mu_j b_j, A = M_B(\sigma)$
|
|
|
|
\begin{align*}
|
2022-06-15 19:34:43 +02:00
|
|
|
\sigma(u, v) & = \sigma\left(\sum \lambda_i b_i, \sum \mu_j b_j\right) = \sum_{i=1}^n \lambda_i
|
2022-06-08 23:25:28 +02:00
|
|
|
\underbrace{\sum_{j=1}^n \underbrace{\sigma(b_i, b_j)}_{a_{ij}} \overline \mu_j}
|
2022-06-15 19:34:43 +02:00
|
|
|
_{A {}_B\overline{\Phi(v)}} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
& = {}_B \Phi(u)^T M_B(\sigma) {}_B \overline{\Phi(v)}
|
|
|
|
\end{align*}
|
|
|
|
\item Analog wie d)
|
|
|
|
\item $b'_i = \sum_k a_{ki} b_k, M_{B'} = (\sigma(b'_i, b'_j))_{i,j}$
|
|
|
|
\begin{align*}
|
2022-06-15 19:34:43 +02:00
|
|
|
\sigma(b'_i, b'_j) & = \sigma\left(\sum_k a_{ki} b_k, \sum_l a_{lj} b_l\right)
|
|
|
|
= \sum_k a_{ki} \sum_l \underbrace{\sigma(b_k,b_l)}_{(M_B(\sigma))_{kl}} \overline{a_{lj}} \\
|
|
|
|
& = \left(A^T M_B(\sigma)\overline A\right)_{ij}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
2022-06-08 23:25:28 +02:00
|
|
|
\underset{\rotatebox{70}{$=$}}{\sigma(v, w)}
|
2023-01-31 13:30:38 +01:00
|
|
|
& = \underset{\rotatebox{110}{$=$}}{\overline{\sigma(w, v)}} \\{}_B \Phi(v)^T M_B(\sigma) {}_B \overline{\Phi(w)}
|
|
|
|
& = \left({}_B\overline{\Phi(w)}^T \overline{M_B(\sigma)} {}_B \Phi(v)\right)^T \\
|
|
|
|
& = {}_B \Phi(v)^T \overline{M_B(\sigma)}^T {}_B \overline{\Phi(w)} \\
|
2022-06-08 23:25:28 +02:00
|
|
|
\implies M_B(\sigma) = M_B(\sigma)^*
|
|
|
|
\end{align*}
|
|
|
|
\end{enumerate}
|
|
|
|
\end{proof}
|
2022-06-01 10:02:40 +02:00
|
|
|
|
|
|
|
\begin{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $V$ euklidischer/unitärer Vektorraum und $\sigma$ symmetrische/hermitesche
|
|
|
|
Bilinear-/Sesquilinearform. Dann existiert eine Orthonormalbasis $B$ mit \\
|
|
|
|
$M_B(\sigma)$ reelle Diagonalmatrix.
|
2022-06-01 10:02:40 +02:00
|
|
|
\end{satz}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
Nach Lemma \ref{theo:3.5.2} e) gilt $M_B(\sigma)^* = M_B(\sigma)$ und daher nach Satz \ref{theo:3.4.3}
|
|
|
|
gibt es orthogonale/unitäre Matrix $U$ mit
|
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
U^* M_B(\sigma) U = \left(
|
|
|
|
\begin{smallmatrix}
|
2022-06-08 23:25:28 +02:00
|
|
|
\lambda_1 \\ & \ddots \\ & & \lambda_n
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{smallmatrix}
|
|
|
|
\right), \lambda_i \in \R \forall i \in [n]
|
2022-06-08 23:25:28 +02:00
|
|
|
\]
|
|
|
|
Behauptung folgt dann aus Lemma \ref{theo:3.5.2} d).
|
|
|
|
\end{proof}
|
2022-06-01 10:02:40 +02:00
|
|
|
|
|
|
|
\subsubsection{Beispiel}
|
2023-01-31 13:30:38 +01:00
|
|
|
$\sigma$ wie in \ref{eq:3.5.1.2}, $A =
|
|
|
|
\begin{pmatrix}
|
|
|
|
1 & 1 \\
|
|
|
|
1 & -5
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
|
|
|
$\chi_A(\lambda) = \det
|
|
|
|
\begin{pmatrix}
|
|
|
|
1-\lambda & 1 \\
|
|
|
|
1 & -5-\lambda
|
|
|
|
\end{pmatrix}
|
2022-06-01 10:02:40 +02:00
|
|
|
= (\lambda -1)(5 + \lambda) - 1$ \\
|
|
|
|
Nullstellen: $\lambda_1, \lambda_2 = -2 \pm \sqrt{10}$\\
|
2023-01-31 13:30:38 +01:00
|
|
|
$b_1 = \frac{1}{\sqrt{20+6\sqrt{10}}}
|
|
|
|
\begin{pmatrix}
|
|
|
|
3 + \sqrt{10} \\
|
|
|
|
1
|
|
|
|
\end{pmatrix}
|
|
|
|
,
|
|
|
|
b_2 = \frac{1}{\sqrt{20-6\sqrt{10}}}
|
|
|
|
\begin{pmatrix}
|
|
|
|
3 - \sqrt{10} \\
|
|
|
|
1
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
|
|
|
$M_B(\sigma) =
|
|
|
|
\begin{pmatrix}
|
|
|
|
-2 + \sqrt{10} & 0 \\
|
|
|
|
0 & -2 -\sqrt{10}
|
|
|
|
\end{pmatrix}
|
|
|
|
$ \\
|
2022-06-01 10:02:40 +02:00
|
|
|
$ \implies q(\tilde x_1, \tilde x_2) = \lambda_1 \tilde x_1^2 + \lambda_2 \tilde x_2^2 \equiv c$
|
2022-06-01 16:29:47 +02:00
|
|
|
|
|
|
|
\begin{tikzpicture}[scale=1.8]
|
|
|
|
\tikzmath{
|
|
|
|
\bfactor1 = 1 / sqrt(20 + 6* sqrt(10));
|
|
|
|
\bfactor2 = 1 / sqrt(20 - 6* sqrt(10));
|
|
|
|
\bx1 = \bfactor1 * (3 + sqrt(10));
|
|
|
|
\by1 = \bfactor1;
|
|
|
|
\bx2 = \bfactor2 * (3 - sqrt(10));
|
|
|
|
\by2 = \bfactor2;
|
|
|
|
\brichtung1 = \by1 / \bx1;
|
|
|
|
\brichtung2 = \by2 / \bx2;
|
|
|
|
}
|
|
|
|
\begin{axis}[
|
2022-06-01 16:59:42 +02:00
|
|
|
title=\scriptsize{Niveaulinien von $q(x)$},
|
2022-06-01 16:29:47 +02:00
|
|
|
xlabel={$x_1$},
|
|
|
|
ylabel={$x_2$},
|
|
|
|
ymin=-2.7,ymax=2.7,
|
|
|
|
xmin=-3.3,xmax=3.3,
|
|
|
|
view={0}{90},
|
|
|
|
axis lines=middle,
|
|
|
|
tick label style={font=\tiny},
|
2022-06-01 16:59:42 +02:00
|
|
|
label style={font=\scriptsize},
|
2022-06-01 16:29:47 +02:00
|
|
|
]
|
|
|
|
\addplot3 [
|
|
|
|
contour gnuplot={
|
|
|
|
levels={0,-1,-4,-9,-16,-25,-36,1,4,9},
|
|
|
|
contour label style={every node/.append style={text=ForestGreen}},
|
|
|
|
label distance = 90pt,
|
|
|
|
},
|
|
|
|
samples=80,
|
|
|
|
domain=-3.3:3.3,
|
|
|
|
domain y=-2.7:2.7,
|
|
|
|
contour/draw color={ForestGreen},
|
|
|
|
]
|
|
|
|
{x^2 + 2*x*y - 5*y^2};
|
2022-06-01 19:53:37 +02:00
|
|
|
\draw [->, red, thick] (0,0) -- (\bx1, \by1) node[above]{\footnotesize$b_1$};
|
|
|
|
\draw [->, red, thick] (0,0) -- (\bx2, \by2) node[above right]{\footnotesize$b_2$};
|
2022-06-01 16:29:47 +02:00
|
|
|
\addplot [
|
|
|
|
domain=-3.3:3.3,
|
|
|
|
color=red,
|
|
|
|
style={dash pattern=on 3pt off 2pt on 15pt off 2pt},
|
|
|
|
]
|
|
|
|
{x * \brichtung1};
|
|
|
|
\addplot [
|
|
|
|
domain=-3.3:3.3,
|
|
|
|
color=red,
|
|
|
|
style={dash pattern=on 3pt off 2pt on 15pt off 2pt},
|
|
|
|
]
|
|
|
|
{x * \brichtung2};
|
|
|
|
\end{axis}
|
|
|
|
\end{tikzpicture}
|
2022-06-01 10:02:40 +02:00
|
|
|
|
2022-06-07 19:16:17 +02:00
|
|
|
\begin{defin}
|
2023-03-28 11:46:57 +02:00
|
|
|
$\rho\colon V \to \K$ heißt \underline{quadratische Form}, wenn $\forall u, v \in V, \lambda \in \K\colon$
|
2022-06-07 19:16:17 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\rho(\lambda v) = \lambda^2 \rho(v)$
|
|
|
|
\item $ \sigma(u, v) := \rho(u + v) - \rho(u) - \rho (v)$ ist eine (symmetrische) Bilinearform
|
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Sei $\operatorname{char}(\K) \neq 2$. Dann entsprechen die quadratischen Formen
|
|
|
|
und symmetrischen Bilinearformen einander eineindeutig.
|
2022-06-07 19:16:17 +02:00
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
|
|
|
$\rho$ quadratische Form $\implies \sigma(u, v) = \rho(u + v) - \rho(u) - \rho(v)$ ist symmetrische
|
|
|
|
Bilinearform. \\
|
2022-06-09 15:32:27 +02:00
|
|
|
Sei umgekehrt $\sigma$ symmetrische Bilinearform,
|
|
|
|
$\rho(v) := \underset{\mathclap{\substack{\rotatebox{90}{$\to$}\\\operatorname{char}(\K) \neq 2}}}
|
|
|
|
{\frac 12} \sigma(v, v)$.
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{align*}
|
|
|
|
\rho(\lambda v) = \frac 12 \sigma(\lambda v, \lambda v) & = \lambda^2 \frac 12 \sigma(v, v) =
|
2023-01-31 13:30:38 +01:00
|
|
|
\lambda^2 \rho(v) \implies \text{a)} \\
|
|
|
|
\rho(u+v) - \rho(u) - \rho(v) & = \frac 12(\sigma(u + v, u + v) - \sigma(u, u) - \sigma(v, v)) \\
|
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \frac 12(\cancel{\sigma(u, u)} + \sigma(u, v) + \sigma(v, u)
|
|
|
|
+ \cancel{\sigma(v, v)} \\
|
|
|
|
- \cancel{\sigma(u, u)} - \cancel{\sigma(v, v)})
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
2022-06-08 23:25:28 +02:00
|
|
|
& = \sigma(u, v) \text{ ist symmetrische Bilinearform.}
|
|
|
|
\end{align*}
|
|
|
|
\end{proof}
|
2022-06-07 19:16:17 +02:00
|
|
|
|
|
|
|
\begin{defin}
|
2022-06-09 10:17:48 +02:00
|
|
|
\label{theo:3.5.6}
|
2023-03-28 11:46:57 +02:00
|
|
|
Sei $V\, \C$-VR. $\rho\colon V \to \R$ heißt \underline{hermitesche Form}, wenn $\forall u, v \in V, \lambda \in \C$:
|
2022-06-07 19:16:17 +02:00
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item $\rho(\lambda v) = \abs{\lambda}^2 \rho(v)$
|
|
|
|
\item $\rho(u+v) + \rho(u -v) = 2(\rho(u) + \rho(v))$
|
|
|
|
\item $\sigma(u,v) := \frac 12 (\rho(u+v) + i\rho(u +iv) - (1+i)(\rho(u) + \rho(v)))$ ist hermitesche
|
|
|
|
Sesquilinearform.
|
|
|
|
\end{enumerate}
|
|
|
|
\end{defin}
|
|
|
|
|
|
|
|
\begin{lemma}
|
|
|
|
Hermitesche Formen und hermitesche Sesquilinearformen entsprechen einander eineindeutig
|
|
|
|
\end{lemma}
|
2022-06-08 23:25:28 +02:00
|
|
|
\begin{proof}
|
2022-06-09 10:18:13 +02:00
|
|
|
Für hermitesche Form ist durch Definition \ref{theo:3.5.6} c) eine hermitesche Sesquilinearform definiert. \\
|
|
|
|
Sei umgekehrt $\sigma$ hermitesche Sesquilinearform. Dann ist $\rho(v) := \sigma(v, v)$ hermitesche
|
|
|
|
Form:
|
|
|
|
\begin{enumerate}[label=\alph*)]
|
|
|
|
\item \checkmark
|
|
|
|
\item \begin{align*}
|
|
|
|
\rho(u+v) + \rho(u - v) &= \sigma(u+v, u+v) + \sigma(u-v, u-v) \\
|
2023-01-31 13:30:38 +01:00
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \sigma(u, u) + \sigma(v, v) + \sigma(u, v) + \sigma(v, u)
|
2022-06-09 10:18:13 +02:00
|
|
|
+ \sigma(u, u)\\ + \sigma(v, v) - \sigma(u, v) - \sigma(v, u)
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{multlined}
|
|
|
|
\\
|
2022-06-09 10:18:13 +02:00
|
|
|
&= 2\sigma(u, u) + 2\sigma(v, v) \\
|
|
|
|
&= 2(\rho(u) + \rho(v))
|
|
|
|
\end {align*}
|
2023-01-31 13:30:38 +01:00
|
|
|
\item
|
|
|
|
\begin{align*}
|
2022-06-09 10:18:13 +02:00
|
|
|
\rho(u+v) + i\rho(u+iv)
|
2023-01-31 13:30:38 +01:00
|
|
|
& - (1+i)(\rho(u)+\rho(v)) = \\
|
|
|
|
&
|
|
|
|
\begin{multlined}
|
|
|
|
= \sigma(u+v, u+v) + i \sigma(u+iv,u+iv) \\- \sigma(u, u) - \sigma(v, v) - i\sigma(u, u) -
|
|
|
|
i \sigma(v, v)
|
|
|
|
\end{multlined}
|
|
|
|
\\
|
|
|
|
& = \sigma(u, v) + \sigma(v, u) + i \sigma(iv, u) + i \sigma(u, iv) \\
|
|
|
|
& = \sigma(u, v) + \overline{\sigma(u, v)} + i \overline{\sigma(u, iv)} + \sigma(u, v) \\
|
2022-06-09 10:18:13 +02:00
|
|
|
& = \sigma(u, v) + \overline{\sigma(u, v)} + i \cdot \overline{\overline{i}} \cdot
|
|
|
|
\overline{\sigma(u, v)}
|
2023-01-31 13:30:38 +01:00
|
|
|
+ \sigma(u, v) \\
|
2022-06-09 10:18:13 +02:00
|
|
|
& = 2 \sigma(u, v)
|
|
|
|
\end{align*}
Also ist $\frac 12 (\rho(u+v) + i\rho(u+iv) - (1+i)(\rho(u) + \rho(v))) = \sigma(u, v)$, das heißt die Formel aus Definition \ref{theo:3.5.6} c) liefert gerade $\sigma$ zurück.
|
|
|
|
\end{enumerate}
|
2022-06-08 23:25:28 +02:00
|
|
|
\end{proof}
|
2022-06-07 19:16:17 +02:00
|
|
|
|
|
|
|
\subsubsection{Bemerkung}
|
|
|
|
$\sigma$ heißt Polarform von $\rho$
|
|
|
|
|
2022-06-08 18:03:22 +02:00
|
|
|
\section[Die Singulärwertzerlegung und die Pseudoinverse]{Die Singulärwertzerlegung und die \\Pseudoinverse}
|
2022-06-07 19:16:17 +02:00
|
|
|
|
2023-01-31 13:30:38 +01:00
|
|
|
Wir wollen nun für zwei euklidische Vektorräume $V, W$ eine geeignete
|
|
|
|
Normalform \\ bezüglich Orthonormalbasen herleiten. Die Polarzerlegung besagt für
|
|
|
|
$\alpha \in \Hom(V, V)$, dass Orthonormalbasen $B, B'$ von $V$ existieren mit
|
2022-06-07 19:16:17 +02:00
|
|
|
\[
|
2023-01-31 13:30:38 +01:00
|
|
|
{}_{B'} M(\alpha)_B =
|
|
|
|
\begin{pmatrix}
|
2022-06-09 14:43:42 +02:00
|
|
|
s_1 \\
|
2022-06-07 19:16:17 +02:00
|
|
|
& \ddots \\
|
|
|
|
& & s_r \\
|
|
|
|
& & & 0 \\
|
|
|
|
& & & & \ddots \\
|
2022-06-09 14:43:42 +02:00
|
|
|
& & & & & 0
|
2023-01-31 13:30:38 +01:00
|
|
|
\end{pmatrix}
|
|
|
|
, s_1, \dots, s_r > 0
|
2022-06-07 19:16:17 +02:00
|
|
|
\]
|
2023-01-31 13:30:38 +01:00
|
|
|
Das heißt $\alpha$ lässt sich aus orthogonalen Endomorphismen und Skalierung
|
|
|
|
zusammensetzen.

\begin{satz}[Singulärwertzerlegung]
	Sei $A \in \R^{m \times n} / \C^{m \times n}$. Dann gibt es orthogonale/unitäre Matrizen $U, V$ sowie
	$s_1, \dots, s_r \in (0, \infty)$ mit
	\[
		A = \underbrace{U}_{\K^{m\times m}} \underbrace{
		\begin{pmatrix}
			s_1 \\
			& \ddots \\
			& & s_r \\
			& & & 0 \\
			& & & & \ddots \\
			& & & & & 0
		\end{pmatrix}
		}_{\K^{m \times n}} \underbrace{V}_{\K^{n \times n}}
	\]
	$s_1, \dots, s_r$ heißen \underline{Singulärwerte} von $A$.
\end{satz}

\begin{proof}
	\begin{itemize}
		\item $A^* A \in \K^{\nxn}$ ist selbstadjungiert und positiv semi-definit. \\
		      Eigenwerte $\lambda_1, \dots, \lambda_n \in [0, \infty)$, ONB $b_1, \dots, b_n$ aus Eigenvektoren.
		      Sei $\lambda_1, \dots, \lambda_r \in (0, \infty), \lambda_{r+1} = \dots = \lambda_n = 0$ und
		      $s_i := \sqrt{\lambda_i}, i\in [n]$.
		\item Es gilt, dass $\overbrace{\frac 1{s_1} A b_1}^{b_1'}, \dots, \overbrace{\frac 1{s_r} A b_r}^{b_r'}$
		      ein Orthonormalsystem in $\K^m$ ist.
		      \begin{align*}
			      \overline{\inner{Ab_i}{Ab_j}}_{\K^m} & = \overline{b_i^T A^T \overline A \,\overline{b_j}}
			      = \overline{b_i}^T A^* A b_j = \lambda_j \overline{b_i}^T b_j \\
			      & = \lambda_j \overline{\inner{b_i}{b_j}}_{\K^n}
			      = \lambda_j \delta_{ij} \in \R
		      \end{align*}
		\item Ergänze $b_1', \dots, b_r'$ zu einer Orthonormalbasis $b_1', \dots, b_r', \dots,
		      b_m'$ von $\K^m$. \\ Sei $\varphi_A\colon x \mapsto A\cdot x \implies {}_{B'}
		      M(\varphi_A)_B = \left(
		      \begin{smallmatrix}
			      s_1 \\
			      & \ddots \\
			      & & s_r \\
			      & & & 0 \\
			      & & & & \ddots \\
			      & & & & & 0
		      \end{smallmatrix}
		      \right)$ \\
		      $v = \sum \mu_i b_i \implies Av = \sum \mu_i \underbrace{A b_i}_{s_i b_i'} = \sum \mu_i s_i b_i'$
	\end{itemize}
\end{proof}
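Zur Illustration ein frei gewähltes Zahlenbeispiel: für $A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & 1 \end{smallmatrix}\right)$
ist $A^* A = \left(\begin{smallmatrix} 2 & 2 \\ 2 & 2 \end{smallmatrix}\right)$ mit Eigenwerten
$\lambda_1 = 4, \lambda_2 = 0$, also $r = 1$ und $s_1 = 2$, und man erhält
\[
	A = \underbrace{\frac{1}{\sqrt 2}\begin{pmatrix} 1 & 1 \\ 1 & -1 \end{pmatrix}}_{U}
	\underbrace{\begin{pmatrix} 2 & 0 \\ 0 & 0 \end{pmatrix}}_{\Sigma}
	\underbrace{\frac{1}{\sqrt 2}\begin{pmatrix} 1 & 1 \\ 1 & -1 \end{pmatrix}}_{V}.
\]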

Mittels der Singulärwertzerlegung können wir für jede Matrix (bzw. lineare
Abbildung) eine verallgemeinerte Inverse berechnen.

Sei ${}_{B'} M(\alpha)_B =
\begin{pmatrix}
	s_1 \\
	& \ddots \\
	& & s_r \\
	& & & 0 \\
	& & & & \ddots \\
	& & & & & 0
\end{pmatrix}
$ \\
$\implies \ker(\alpha) = \linspan{ b_{r+1}, \dots, b_n }_V, \im(\alpha) = \linspan{b_1', \dots, b_r'},
\ker(\alpha)^\bot = \linspan{ b_1, \dots, b_r }_V$

\begin{align*}
	\alpha\colon V & \to
	\ker(\alpha)^{\bot} & & \overset{\beta}{\to}
	\im(\alpha) & & \to
	W \\
	\sum_{i=1}^n \lambda_i b_i & \mapsto
	\sum_{i=1}^r \lambda_i b_i & & \mapsto
	\sum_{i=1}^r s_i \lambda_i b_i' & & \mapsto
	\sum_{i=1}^r s_i \lambda_i b_i' \\
	\left(
	\begin{smallmatrix}
		x_1 \\ \vdots \\ x_n
	\end{smallmatrix}
	\right) & \mapsto
	\left(
	\begin{smallmatrix}
		x_1 \\ \vdots \\ x_r
	\end{smallmatrix}
	\right) & & \mapsto
	\left(
	\begin{smallmatrix}
		s_1 x_1 \\ \vdots \\ s_r x_r
	\end{smallmatrix}
	\right) & & \mapsto
	\left.\left(
	\begin{smallmatrix}
		s_1 x_1 \\ \vdots \\ s_r x_r \\ 0 \\ \vdots \\ 0
	\end{smallmatrix}
	\right)\right\} m \\
	\alpha^\dagger(w) = \sum_{i=1}^r \frac{\mu_i}{s_i} b_i & \mapsfrom
	\sum_{i=1}^r \frac{1}{s_i} \mu_i b_i & & \underset{\beta^{-1}}{\mapsfrom}
	\sum_{i=1}^r \mu_i b_i' & & \mapsfrom
	\sum_{i=1}^m \mu_i b_i' = w
\end{align*}

\subsubsection{Bemerkung}
$\alpha$ invertierbar, $V=W \implies n = m = r \implies \alpha^\dagger = \alpha^{-1}$ \\
Wir erhalten also eine echte Verallgemeinerung der Inversen.

\begin{defin}
	\leavevmode
	\begin{itemize}
		\item Sei $A \in \K^{m \times n}, \K \in \{\R,\C\}$ mit
		      \[
			      A = U^* \Sigma V, \Sigma = \left(
			      \begin{smallmatrix}
				      s_1 \\
				      & \ddots \\
				      & & s_r \\
				      & & & 0 \\
				      & & & & \ddots \\
				      & & & & & 0
			      \end{smallmatrix}
			      \right)
		      \]
		      Dann heißt die Matrix
		      \[
			      A^\dagger = V^* \Sigma^\dagger U \in \K^{n \times m}, \Sigma^\dagger = \left(
			      \begin{smallmatrix}
				      \frac1{s_1} \\
				      & \ddots \\
				      & & \frac1{s_r} \\
				      & & & 0 \\
				      & & & & \ddots \\
				      & & & & & 0
			      \end{smallmatrix}
			      \right)
		      \]
		      (Moore-Penrose) \underline{Pseudoinverse} von $A$.
		\item Sei $\alpha \in \homk(V, W), \dim(V), \dim(W) < \infty$ und $B, B'$
		      Orthonormalbasen mit ${}_{B'} M(\alpha)_B = \Sigma$ und $\alpha^\dagger$ so,
		      dass ${}_B M(\alpha^\dagger)_{B'} = \Sigma^\dagger$. Dann heißt
		      $\alpha^\dagger$ (Moore-Penrose) \underline{Pseudoinverse} von $\alpha$.
	\end{itemize}
\end{defin}
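Für das frei gewählte Beispiel von oben, $A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & 1 \end{smallmatrix}\right)$
mit $\Sigma = \left(\begin{smallmatrix} 2 & 0 \\ 0 & 0 \end{smallmatrix}\right)$, ergibt sich mit denselben $U, V$
(hier ist $U^* = U$)
\[
	A^\dagger = V^* \Sigma^\dagger U
	= \frac{1}{\sqrt 2}\begin{pmatrix} 1 & 1 \\ 1 & -1 \end{pmatrix}
	\begin{pmatrix} \frac12 & 0 \\ 0 & 0 \end{pmatrix}
	\frac{1}{\sqrt 2}\begin{pmatrix} 1 & 1 \\ 1 & -1 \end{pmatrix}
	= \frac14 \begin{pmatrix} 1 & 1 \\ 1 & 1 \end{pmatrix} = \frac14 A.
\]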

\begin{satz}
	\label{theo:3.6.3}
	Seien $V, W$ endlichdimensionale euklidische/unitäre Vektorräume, \\
	$\alpha \in \Hom(V, W)$. Dann gilt:
	\[
		\alpha^\dagger \text{ ist Pseudoinverse von } \alpha \iff
		\left\{
		\begin{aligned}
			& \alpha \circ \alpha^\dagger \circ \alpha = \alpha \\
			& \alpha^\dagger \circ \alpha \circ \alpha^\dagger = \alpha^\dagger \\
			& \alpha \circ \alpha^\dagger \text{ selbstadjungiert} \\
			& \alpha^\dagger \circ \alpha \text{ selbstadjungiert}
		\end{aligned}
		\right.
	\]
\end{satz}

\begin{proof}
	Beweis über Matrizen, da äquivalent. Weiters zeigen wir die $\implies$-Richtung nur für $\R$.
	\begin{itemize}
		\item[$\implies$:] $A = U^T \Sigma V, A^\dagger = V^T \Sigma^\dagger U$
			\[
				A A^\dagger = U^T \Sigma \underbrace{V V^T}_{=I} \Sigma^\dagger U = U^T \Sigma \Sigma^\dagger U =
				U^T \left(
				\begin{smallmatrix}
					1 \\ & \ddots \\ & & 1 \\ & & & 0 \\ & & & & \ddots \\
					& & & & & 0
				\end{smallmatrix}
				\right) U
			\]
			ist symmetrisch, also selbstadjungiert; analog für $A^\dagger A$.
			\[
				A A^\dagger A = U^T \Sigma \underbrace{V V^T}_I \Sigma^\dagger \underbrace{U U^T}_I \Sigma V
				= U^T \underbrace{\Sigma \Sigma^\dagger \Sigma}_\Sigma V = U^T \Sigma V = A
			\]
			Analog folgt $A^\dagger A A^\dagger = A^\dagger$.
		\item[$\impliedby$:]
			\begin{itemize}
				\item Sei $\alpha \in \Hom(V, W), \alpha^\dagger \in \Hom(W, V)$
				      \begin{equation}
					      \label{eq:3.6.3.1}
					      \begin{aligned}
						      \ker(\alpha) = \ker(\alpha^\dagger \circ \alpha) & &
						      \im(\alpha) = \im(\alpha \circ \alpha^\dagger) \\
						      \ker(\alpha^\dagger) = \ker(\alpha \circ \alpha^\dagger) & &
						      \im(\alpha^\dagger) = \im(\alpha^\dagger \circ \alpha)
					      \end{aligned}
				      \end{equation}
				      $\ker(\alpha) \subseteq \ker(\alpha^\dagger \circ \alpha) \subseteq
					      \ker(\alpha \circ \alpha^\dagger \circ \alpha)
					      = \ker(\alpha) \implies \ker(\alpha) = \ker(\alpha^\dagger \circ \alpha)$

				      $\im(\alpha) \supseteq \im(\alpha \circ \alpha^\dagger) \supseteq
					      \im(\alpha \circ \alpha^\dagger \circ \alpha) = \im(\alpha) \implies
					      \im(\alpha) = \im(\alpha \circ \alpha^\dagger)$

				      Die beiden übrigen Gleichungen folgen analog.
				\item $\nu := \alpha^\dagger \circ \alpha$ erfüllt $\nu \circ \nu = \nu$ und ist selbstadjungiert;
				      analog für $\nu' := \alpha \circ \alpha^\dagger$. \\
				      $\implies \underbrace{\ker(\nu)}_{=\ker(\alpha)} \bot \im(\nu)$
				      [Sei $v\in \ker(\nu), w = \nu(u) \in \im(\nu) \implies \inner vw = \inner{v}{\nu(u)} = \inner{\nu(v)}{u} = 0$] \\
				      $\implies$
				      \begin{enumerate}[label=\alph*)]
					      \item $\nu(v) \in \im(\nu)$
					      \item $\forall u \in \im(\nu), v \in V\colon$
					            \begin{align*}
						            \inner{\nu(v) - v}{u} & = \inner{\nu(v) - v}{\nu(w)} = \inner{\nu^2(v) - \nu(v)}{w} \\
						            & = \inner{\nu(v) - \nu(v)}{w} = 0
					            \end{align*}
				      \end{enumerate}
				      $\implies (b_1, \dots, b_n)$ ONB mit $\linspan{ b_{r+1}, \dots, b_n }_V = \ker(\nu) =
					      \ker(\alpha)$
				      \begin{align*}
					      & \sum_{i=1}^n \lambda_i b_i & & \overset{\nu}{\mapsto} \sum_{i=1}^r \lambda_i b_i
					      \text{ [$\nu$ ist orthogonale Projektion auf $\ker(\alpha)^\bot$]} \\
					      \text{Analog:} & \sum_{i=1}^m \mu_i b_i' & & \overset{\nu'}{\mapsto} \sum_{i=1}^r \mu_i b_i'
					      \text{ [$\nu'$ ist orthogonale Projektion auf $\im(\alpha)$]}
				      \end{align*}
				      \[
					      \implies {}_B M(\alpha^\dagger \circ \alpha)_B =
					      \left(
					      \begin{smallmatrix}
						      1 \\
						      & \ddots \\
						      & & 1 \\
						      & & & 0 \\
						      & & & & \ddots \\
						      & & & & & 0
					      \end{smallmatrix}
					      \right),\; {}_{B'} M(\alpha \circ \alpha^\dagger)_{B'} =
					      \left(
					      \begin{smallmatrix}
						      1 \\
						      & \ddots \\
						      & & 1 \\
						      & & & 0 \\
						      & & & & \ddots \\
						      & & & & & 0
					      \end{smallmatrix}
					      \right)
				      \]
				      \begin{equation}
					      \label{eq:3.6.3.2}
					      \begin{split}
						      \implies
						      \underbrace{\left(
							      \begin{smallmatrix}
								      1 \\
								      & \ddots \\
								      & & 1 \\
								      & & & 0 \\
								      & & & & \ddots \\
								      & & & & & 0
							      \end{smallmatrix}
							      \right)}_n &= {}_B M(\alpha^\dagger)_{B'} \cdot {}_{B'} M(\alpha)_B \\
						      &=
						      \begin{pmatrix}
							      a_{11}^\dagger & \dots & a_{1m}^\dagger \\
							      \vdots & & \vdots \\
							      a_{n1}^\dagger & \dots & a_{nm}^\dagger
						      \end{pmatrix}
						      \underbrace{\left.\left(
							      \begin{smallmatrix}
								      s_1 \\
								      & \ddots \\
								      & & s_r \\
								      & & & 0 \\
								      & & & & \ddots \\
								      & & & & & 0
							      \end{smallmatrix}
						      \right)\right\}}_n \scriptstyle{m}
					      \end{split}
				      \end{equation}
				      \begin{align*}
					      \underbrace{
						      \left(
						      \begin{smallmatrix}
							      1 \\
							      & \ddots \\
							      & & 1 \\
							      & & & 0 \\
							      & & & & \ddots \\
							      & & & & & 0
						      \end{smallmatrix}
						      \right)
					      }_m & = {}_{B'} M(\alpha)_B \cdot {}_B M(\alpha^\dagger)_{B'} \\
					      & = \scriptstyle{m}\underbrace{\left\{\left(
						      \begin{smallmatrix}
							      s_1 \\
							      & \ddots \\
							      & & s_r \\
							      & & & 0 \\
							      & & & & \ddots \\
							      & & & & & 0
						      \end{smallmatrix}
						      \right)\right.}_n
					      \begin{pmatrix}
						      a_{11}^\dagger & \dots & a_{1m}^\dagger \\
						      \vdots & & \vdots \\
						      a_{n1}^\dagger & \dots & a_{nm}^\dagger
					      \end{pmatrix}
				      \end{align*}
				      Es gilt $\ker(\alpha^\dagger) = \ker(\nu') = \im(\alpha)^\bot
					      = \linspan{ b_{r+1}', \dots, b_m' }$ und $\im(\alpha^\dagger) = \im(\nu) = \ker(\alpha)^\bot
					      = \linspan{ b_1, \dots, b_r }$, also $a^\dagger_{ij} = 0$ für $i > r$ oder $j > r$.
				      \[
					      \implies {}_B M(\alpha^\dagger)_{B'} =
					      \begin{pmatrix}
						      \begin{smallmatrix}
							      a_{11}^\dagger & \dots & a_{1r}^\dagger \\
							      \vdots & & \vdots \\
							      a_{r1}^\dagger & \dots & a_{rr}^\dagger
						      \end{smallmatrix}
						      & 0 \\
						      0 & 0
					      \end{pmatrix}
					      \text{ und } a_{ij}^\dagger = \delta_{ij} \frac{1}{s_i} \text{ wegen \ref{eq:3.6.3.2}}
				      \]
			\end{itemize}
	\end{itemize}
\end{proof}
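Für das Beispiel $A = \left(\begin{smallmatrix} 1 & 1 \\ 1 & 1 \end{smallmatrix}\right)$, $A^\dagger = \frac14 A$
lassen sich die vier Bedingungen direkt nachrechnen: wegen $A^2 = 2A$ ist
$A A^\dagger A = \frac14 A^3 = A$, $A^\dagger A A^\dagger = \frac1{16} A^3 = \frac14 A = A^\dagger$,
und $A A^\dagger = A^\dagger A = \frac12 \left(\begin{smallmatrix} 1 & 1 \\ 1 & 1 \end{smallmatrix}\right)$ ist selbstadjungiert.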

\begin{satz}
	Sei $\alpha \in \Hom(V, W)$.
	\begin{itemize}
		\item $\alpha$ injektiv $\implies \alpha^\dagger = (\alpha^* \circ \alpha)^{-1} \circ \alpha^*$
		\item $\alpha$ surjektiv $\implies \alpha^\dagger = \alpha^* \circ (\alpha \circ \alpha^*)^{-1}$
	\end{itemize}
\end{satz}

\begin{proof}
	Sei $\alpha$ injektiv. Wir zeigen zunächst, dass $\alpha^* \circ \alpha$ bijektiv ist.
	Angenommen, $\alpha^* \circ \alpha$ ist nicht surjektiv $\implies$
	\begin{align*}
		\exists w \in V \setminus \{0\}\colon \forall v \in V\colon \inner{\alpha^* \circ \alpha(v)}{w} = 0 \\
		\implies \forall v\colon \inner{\alpha(v)}{\alpha(w)}_W = 0 \\
		\implies \alpha(w) \in \im(\alpha)^\bot \cap \im(\alpha) \implies \alpha(w) = 0 \\
		\overset{\alpha \text{ injektiv}}{\implies} w = 0 & \text{\Lightning}
	\end{align*}
	Also ist $\alpha^* \circ \alpha$ surjektiv und als Endomorphismus von $V$ damit bijektiv.
	$\implies \beta := (\alpha^* \circ \alpha)^{-1} \circ \alpha^*$ ist wohldefiniert.
	Nun gilt:
	\begin{itemize}
		\item $\alpha \circ \beta \circ \alpha = \alpha \circ (\alpha^* \circ \alpha)^{-1} \circ \alpha^* \circ
			      \alpha = \alpha$
		\item $\beta \circ \alpha \circ \beta = (\alpha^* \circ \alpha)^{-1} \circ \alpha^* \circ \alpha \circ
			      \beta = \beta$
		\item $\beta \circ \alpha, \alpha \circ \beta$ sind selbstadjungiert.
	\end{itemize}
	$\underset{\text{Satz \ref{theo:3.6.3}}}{\implies} \beta = \alpha^\dagger$.
	Der surjektive Fall folgt analog.
\end{proof}
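Ein frei gewähltes Beispiel zum injektiven Fall: $A = \left(\begin{smallmatrix} 1 \\ 1 \end{smallmatrix}\right) \in \R^{2 \times 1}$
ist injektiv mit $A^* A = (2)$, also
\[
	A^\dagger = (A^* A)^{-1} A^* = \frac12 \begin{pmatrix} 1 & 1 \end{pmatrix}
	= \begin{pmatrix} \frac12 & \frac12 \end{pmatrix}.
\]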

\subsubsection{Anwendung: Methode der kleinsten Quadrate}
Sei $Ax = b$ ein lineares Gleichungssystem mit $L(A,b) = \emptyset$. Versuche ein
$x$ zu finden, für das $\norm{Ax-b}_{\K^m}$ minimal ist (abstrakt: $\norm{\alpha(v) - w}$ minimal).
Sei $b_1, \dots, b_n$ Orthonormalbasis von $V$ und $b_1', \dots, b_m'$ ONB von
$W$ mit $\linspan{ b_1, \dots, b_r } = \ker(\alpha)^\bot$ und $\linspan {b_1', \dots,
	b_r'} = \im(\alpha)$. Für $v = \sum_{i=1}^n \lambda_i b_i$ gilt $\alpha(v) =
	\sum_{i=1}^r s_i \lambda_i b_i'$; sei $w = \sum_{i=1}^m \mu_i b_i'$.

\[
	\norm{\alpha(v) - w}^2 = \norm{\sum_{i=1}^r s_i \lambda_i b_i' - \sum_{i=1}^m \mu_i b_i'}^2 =
	\sum_{i=1}^r (s_i \lambda_i - \mu_i)^2 + \sum_{i = r+1}^m \mu_i^2
\]
Das wird minimal, wenn $\lambda_i = \frac{\mu_i}{s_i}, i \in [r]$, das heißt, ein
optimales $v$ ist durch $v^\dagger = \alpha^\dagger(w)$ gegeben.
$\left[\text{Alle optimalen } v \text{ bilden } v^\dagger + \ker(\alpha) =
	L(\alpha^* \alpha, \alpha^*(w))\right]$
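Ein kleines Zahlenbeispiel mit frei gewählten Werten: das unlösbare System
$\left(\begin{smallmatrix} 1 \\ 1 \end{smallmatrix}\right) x = \left(\begin{smallmatrix} 1 \\ 3 \end{smallmatrix}\right)$
führt auf $A^* A x = A^* b$, also $2x = 4$ und damit $x = 2 = A^\dagger b$;
der minimale Fehler ist $\norm{Ax - b} = \sqrt{(2-1)^2 + (2-3)^2} = \sqrt 2$.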

\begin{satz}
	Sei $\alpha \in \homk(V, W), \K \in \{\R,\C\}, V, W$ endlichdimensional.
	Sei $w \in W$. Dann gilt mit $v^\dagger = \alpha^\dagger(w)$, dass
	\[
		\norm{\alpha(v^\dagger)-w} = \min_{v\in V} \norm{\alpha(v) - w}
	\]
	Alle Vektoren mit dieser Eigenschaft erfüllen die\\
	\underline{Normalgleichungen} $\ontop{\alpha^* \alpha(v) = \alpha^*(w)}{A^* A x
			= A^* b}$
\end{satz}

\begin{proof}
	Angenommen $L(A, b) \neq \emptyset$. $\to$ Suche $v \in L(A, b)$ mit minimaler Norm:
	Sei $w \in \im(\alpha) \implies w = \sum_{j=1}^r \mu_j b_j'$
	\[
		\implies L(\alpha, w) = \left\{ \sum_{j=1}^r \frac{\mu_j}{s_j} b_j + \sum_{j=r+1}^n \lambda_j b_j\colon
		\lambda_{r+1}, \dots, \lambda_n \in \K\right\}
	\]
	Die Norm wird minimal, wenn $\lambda_{r+1} = \dots = \lambda_n = 0$, das heißt für $v =
		\sum_{i=1}^r \frac{\mu_i}{s_i} b_i = \alpha^\dagger(w)$.
\end{proof}

\begin{satz}
	Sei $\alpha \in \Hom(V, W), w \in \im(\alpha)$.
	Dann gilt mit $v^\dagger = \alpha^\dagger (w)$:
	\[
		\norm{v^\dagger} = \min\{\norm v\colon \alpha(v) = w \}
	\]
\end{satz}
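Ein frei gewähltes Beispiel: das unterbestimmte System $\begin{pmatrix} 1 & 1 \end{pmatrix} v = 2$
hat die Lösungsmenge $\{(\lambda, 2-\lambda)^T \colon \lambda \in \R\}$; minimale Norm hat
$v^\dagger = A^\dagger \cdot 2 = \left(\begin{smallmatrix} 1 \\ 1 \end{smallmatrix}\right)$ mit
$A^\dagger = A^* (A A^*)^{-1} = \left(\begin{smallmatrix} \frac12 \\ \frac12 \end{smallmatrix}\right)$,
denn $\lambda^2 + (2-\lambda)^2$ wird genau bei $\lambda = 1$ minimal.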

\subsubsection{Beispiel (lineare Regression)}
$(t_i, y_i)_{i=1}^m$ gegeben. Wir wollen ein Polynom finden, das gut auf die Messungen passt.
Suchen also $p$ mit $p(t_i) \approx y_i$ für alle $i \in [m]$. \\
\begin{tikzpicture}[declare function={approx(\x) = 0.17 * \x * \x - 1.1 * \x + 4.1;}]
	\draw [red, thick] plot [domain=0:10,samples=144,smooth] (\x, {approx(\x)}) node[right,color=black]
	{$p(t) = a_0 + a_1 t + a_2 t^2$};
	\foreach[count=\i] \diff in {0.93,-0.56,-2.12,1.35,-0.83,0.87,-0.04,-1.16,-0.02,1.25,0.13,-0.62,2.71,-1.84,-0.24,1.64,-0.06,-0.52,-0.21,-1.24}
	\filldraw[ForestGreen] (\i * .5, {approx(\i * .5) + \diff * .6}) circle (.5mm);
	\draw[->] (0, 0) -- (10, 0);
	\draw[->] (0, 0) -- (0, 10);
\end{tikzpicture}
\[
	\text{minimiere }
	\sum_{i=1}^m (p(t_i) - y_i)^2 = \sum_{i=1}^m (a_0 + a_1 t_i + a_2 t_i^2 - y_i)^2 = \norm{A x -b}^2_{\K^m}
	\text{ wobei}
\]
\[
	A =
	\begin{pmatrix}
		1 & t_1 & t_1^2 \\
		\vdots & \vdots & \vdots \\
		1 & t_m & t_m^2
	\end{pmatrix}
	,
	x =
	\begin{pmatrix}
		a_0 \\
		a_1 \\
		a_2
	\end{pmatrix}
	,
	b =
	\begin{pmatrix}
		y_1 \\
		\vdots \\
		y_m
	\end{pmatrix}
\]
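Mit frei gewählten Beispieldaten $(t_i, y_i) \in \{(0, 0), (1, 1), (2, 1)\}$ und dem vereinfachten Ansatz einer
Ausgleichsgeraden $p(t) = a_0 + a_1 t$ ist
$A = \left(\begin{smallmatrix} 1 & 0 \\ 1 & 1 \\ 1 & 2 \end{smallmatrix}\right)$,
$b = \left(\begin{smallmatrix} 0 \\ 1 \\ 1 \end{smallmatrix}\right)$, und die Normalgleichungen lauten
\[
	A^* A \begin{pmatrix} a_0 \\ a_1 \end{pmatrix}
	= \begin{pmatrix} 3 & 3 \\ 3 & 5 \end{pmatrix} \begin{pmatrix} a_0 \\ a_1 \end{pmatrix}
	= \begin{pmatrix} 2 \\ 3 \end{pmatrix} = A^* b
	\quad\implies\quad a_0 = \tfrac16,\; a_1 = \tfrac12.
\]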

\subsubsection{Anwendung: Ausgleichsquadrik}
Problem: homogenes LGS $Ax=0$. Finde $x$ mit $\norm x = 1$ und $\norm{Ax}$
minimal. \\ Sei $b_1, \dots, b_n$ eine ONB aus Eigenvektoren von $A^* A$ mit nichtnegativen
Eigenwerten $s_1 \le s_2 \le \dots \le s_n$.
\begin{align*}
	x = \sum \lambda_i b_i \implies \norm{Ax}^2 & = \inner{Ax}{Ax} \\
	& = \inner{A^* A x}{x} =
	\inner{\sum s_i \lambda_i b_i}{\sum \lambda_j b_j} = \sum_{i=1}^n s_i \abs{\lambda_i}^2
\end{align*}
Mit $\norm x^2 = \sum \abs{\lambda_i}^2$ folgt
\[
	\frac{\norm{Ax}^2}{\norm x^2} = \frac{\sum s_i \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} \ge
	\frac{s_1 \sum \abs{\lambda_i}^2}{\sum \abs{\lambda_i}^2} = s_1
\]
$\norm x = 1 \implies \norm{Ax} \ge \sqrt{s_1}$.
Für $x = b_1$ (also $\lambda_1 = 1, \lambda_2 = \dots = \lambda_n = 0$) gilt $\norm{Ab_1} = \sqrt{s_1}$, das heißt $b_1$
löst unser Minimierungsproblem. \\
$Q = \{(x,y) \in \R^2\colon \psi(x, y) = 0\}$ \\
$\psi(x, y) := a_1 x^2 + a_2 xy + a_3 y^2 + a_4 x + a_5 y + a_6$ \\
Gegeben: $(x_i,y_i)^m_{i=1}$. Suche $x = (a_1, \dots, a_6)^T$ mit $\norm x = 1$, sodass
\[
	\sum_{i=1}^m \left(a_1 x_i^2 + a_2 x_i y_i + a_3 y_i^2 + a_4 x_i + a_5 y_i + a_6\right)^2=\norm{Ax}^2
\]
minimal wird. Dabei ist $A =
\begin{pmatrix}
	x_1^2 & x_1 y_1 & y_1^2 & x_1 & y_1 & 1 \\
	\vdots & \vdots & \vdots & \vdots & \vdots & \vdots \\
	x_m^2 & x_m y_m & y_m^2 & x_m & y_m & 1 \\
\end{pmatrix}
$ \\
Suche also $x \in \R^6$ mit $\norm x = 1$ und $\norm{Ax}$ minimal
$\implies x$ ist Eigenvektor von $A^* A$ zum kleinsten Eigenwert.

\begin{nonumbersatz}
	Sei $A \in \K^{m \times n}$ und $b \in \K^n$ Eigenvektor von $A^* A$ zum kleinsten Eigenwert $r_1$.
	Dann gilt
	\[
		\frac{\norm{Ab}}{\norm b} = \min\left\{\frac{\norm{Ax}}{\norm x}\colon x\in\K^n \setminus \{0\}\right\} = \sqrt{r_1}
	\]
\end{nonumbersatz}
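Ein frei gewähltes Beispiel: für $A = \left(\begin{smallmatrix} 2 & 0 \\ 0 & 1 \end{smallmatrix}\right)$ ist
$A^* A = \left(\begin{smallmatrix} 4 & 0 \\ 0 & 1 \end{smallmatrix}\right)$ mit kleinstem Eigenwert $r_1 = 1$ und
Eigenvektor $b = e_2$; wegen $\norm{Ax}^2 = 4\abs{\lambda_1}^2 + \abs{\lambda_2}^2 \ge \norm x^2$ für
$x = \lambda_1 e_1 + \lambda_2 e_2$ gilt tatsächlich $\min_{x \neq 0} \frac{\norm{Ax}}{\norm x} = 1 = \sqrt{r_1}$,
angenommen in $x = b$.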
\end{document}