% -*- Mode: LaTeX -*-
\documentclass[12pt, fullpage]{article}
\usepackage{fullpage}
\usepackage{latexsym}
\usepackage{amsthm}
\usepackage{amssymb}
\usepackage{amsmath}
\usepackage[all]{xy}
\setlength{\parskip}{0.5em}
\setlength{\parindent}{0em}
%\renewcommand{\baselinestretch}{1.5}
%\renewcommand{\theenumi}{(\arabic{enumi})}
\newtheorem{theorem}{Theorem}
\newtheorem{corollary}{Corollary}
%\newtheorem{coroflem}{Corollary of the Lemma}
\newtheorem{proposition}{Proposition}
\newtheorem{lemma}{Lemma}
\newtheorem{definition}{Definition}
\newtheorem{example}{Example}
\newtheorem{exercise}{Exercise}[section]
%\newcounter{Lcount}
%\newenvironment{letterlist}{\setcounter{Lcount}{1}\begin{list}{\alph{Lcount})}}{\end{list}}
\newcommand{\g}{\mathfrak{g}}
\newcommand{\B}{\mathfrak{B}}
\newcommand{\HgV}{H^1(\g,V)}
\newcommand{\tr}{{\rm tr}}
\title{Lecture 22 -- November 30, 2004}
\author{Prof. Victor Ka\v{c}\\ Scribes: Genya Zaytman and Yaim Cooper}
\date{}
\begin{document}
\maketitle
\setcounter{section}{22}
%\setcounter{exercise}{1}
{\bf Definition.} An enveloping algebra of a Lie algebra $\g$ is a pair $(U, \varphi)$ where $U$ is a unital associative algebra and $\varphi$ is a Lie algebra homomorphism $\g\rightarrow U_-$ (where $U_-$ denotes the Lie algebra structure on $U$ given by $[a,b] = ab-ba$).
{\bf Example.} Given a representation $\varphi :\g\rightarrow \mathrm{End\ } V = U$, we have an enveloping algebra $(U, \varphi)$ of $\g$.
{\bf Definition.} The universal enveloping algebra of $\g$ is an enveloping algebra $(U(\g), \psi )$ which is universal in the sense that for any other enveloping algebra $(U,\varphi)$, there is a unique associative algebra homomorphism $\pi : U(\g) \rightarrow U$ which makes the following diagram (of Lie algebra homomorphisms) commute:
$$\xymatrix{ \g \ar[r]^{\psi} \ar[d]_{\varphi} & U(\g)_- \ar[dl]_\pi \\
U_-}$$
{\bf Theorem.} For any Lie algebra $\g$, a universal enveloping algebra exists and is unique.
\begin{proof}
1. Uniqueness: Suppose there are two universal enveloping algebras, $(U(\g), \psi)$ and $(U'(\g), \psi')$.
$$\xymatrix{ \g \ar[r]^{\psi} \ar[d]_{\psi'} & U(\g)_- \ar[dl]_{\pi'} \\
U'(\g)_- \ar@/_/[ur]_\pi}$$
But then $\pi \circ \pi' : U(\g) \rightarrow U(\g)$, and also $\pi' \circ \pi : U'(\g) \rightarrow U'(\g)$.
But $$\xymatrix{ \g \ar[r]^{\psi} \ar[d]_{\psi} & U(\g)_- \\
U(\g)_- \ar[ur]^{\pi \circ \pi'} \ar@/_/[ur]_1}$$ is a commuting diagram.
By uniqueness, we see that $\pi \circ \pi' = 1$, and by symmetry, $\pi' \circ \pi = 1$.
Existence: Let $\{a_i\}$ be a basis for $\g$. Let $U(\g)$ be the unital associative algebra generated by the $a_i$, with relations $a_i a_j - a_j a_i = [a_i , a_j]$. Let $\psi (a_i) = a_i$.
Since we divided by the above relations, $\psi$ is a Lie algebra homomorphism $\g \rightarrow U(\g)_-$. So, this is an enveloping algebra.
It is universal because:
$$\xymatrix{ \g \ar[r]^{\psi} \ar[d]_{\varphi} & U(\g)_- \ar[dl]_\pi \\
U_-}$$
This diagram commutes if we let $\pi (a_{i_1} \cdots a_{i_s}) = \varphi (a_{i_1}) \cdots \varphi (a_{i_s})$.
\end{proof}
{\bf Poincar\'e--Birkhoff--Witt Theorem.} The monomials $a_{i_1} \cdots a_{i_s}$ with $i_1 \leq i_2 \leq \cdots \leq i_s$ form a basis of $U(\g)$. In particular, $\psi : \g \rightarrow U(\g)_-$ is an embedding.
\begin{proof}
I) These monomials span $U(\g)$. Of course, the unordered monomials $a_{j_1} \cdots a_{j_s}$ span $U(\g)$. We prove by induction on $(s, $ number of inversions$)$ that using the relations $[a_i, a_j]=a_i a_j - a_j a_i$, we can bring such a monomial to a linear combination of ordered monomials. If the monomial is $\cdots a_{j_t} a_{j_{t+1}} \cdots$ with $j_t > j_{t+1}$, we replace $a_{j_t} a_{j_{t+1}}$ with $[a_{j_t}, a_{j_{t+1}}] + a_{j_{t+1}} a_{j_t}$ (where $[a_{j_t}, a_{j_{t+1}}]$ is a linear combination of the generators $a_i$). We get a sum of monomials, where the number of factors is less than $s$ in all but the last one, and in that one, the number of inversions drops by 1. Thus we can apply the inductive assumption.
II) Let $\B_n$ be the free vector space on generators $u_{i_1} \cdots u_{i_n}$, $i_1 \leq i_2 \leq \cdots \leq i_n$. Let $\B = 1 \oplus \B_1 \oplus \B_2 \oplus \cdots$. We will show that there is a linear map $\sigma : U(\g) \rightarrow \B$ such that the image of the ordered monomials under $\sigma$ is linearly independent, which will complete the proof.
Let us define $\sigma$ by
\begin{equation}
\sigma (1) = 1, \quad \sigma (a_{i_1} \cdots a_{i_n}) = u_{i_1} \cdots u_{i_n} \text{ if } i_1 \leq \cdots \leq i_n.
\end{equation}
Finally, we want to have
\begin{equation}
\sigma (a_{j_1}\cdots a_{j_n} - a_{j_1}\cdots a_{j_{k+1}} a_{j_k} \cdots a_{j_n}) = \sigma (a_{j_1} \cdots [ a_{j_k},a_{j_{k+1}}] \cdots a_{j_n}).
\end{equation}
To show that such a linear map exists, we assume it has been defined for all monomials of degree less than or equal to $n-1$ in $U(\g)$, and show that it can be extended to a map on all monomials of degree less than or equal to $n$ in $U(\g)$. So, assuming $\sigma$ has been defined for all monomials of degree less than or equal to $n-1$ in $U(\g)$, if $a_{i_1} \cdots a_{i_n}$ is an ordered monomial, let $\sigma (a_{i_1} \cdots a_{i_n}) = u_{i_1} \cdots u_{i_n}$.
If $a_{j_1} \cdots a_{j_n}$ is not ordered, suppose $j_k > j_{k+1}$. Then set
\begin{equation}
\sigma (a_{j_1}\cdots a_{j_n}) = \sigma (a_{j_1}\cdots a_{j_{k+1}} a_{j_k} \cdots a_{j_n}) + \sigma (a_{j_1} \cdots [ a_{j_k},a_{j_{k+1}}] \cdots a_{j_n}).
\end{equation}
We must check that this map is well defined, in that it is independent of the choice of the pair $(j_k, j_{k+1})$. Suppose $(j_l, j_{l+1})$ is another pair with $j_l > j_{l+1}$. There are two cases: 1) $l>k+1$ and 2) $l=k+1$.
1) Set $a_{j_k}=u, a_{j_{k+1}}=v, a_{j_l}=w, a_{j_{l+1}}=x.$ Then the induction hypothesis permits us to write for the right hand side of (3)
\begin{equation}
\sigma(\cdots v u \cdots x w \cdots) + \sigma(\cdots v u \cdots [wx] \cdots) + \sigma(\cdots [u v] \cdots x w \cdots) + \sigma(\cdots [u v] \cdots [wx] \cdots)
\end{equation}
If we start with $(j_l, j_{l+1})$, we obtain
\begin{multline}
\sigma(\cdots u v \cdots x w \cdots) + \sigma(\cdots u v \cdots [wx] \cdots) =
\sigma(\cdots v u \cdots x w \cdots) + \\
\sigma(\cdots [u v] \cdots x w \cdots) + \sigma(\cdots v u \cdots [w x] \cdots) + \sigma(\cdots [u v] \cdots [w x] \cdots)
\end{multline}
This is the same as the value obtained before.
2) Set $a_{j_k}=u, a_{j_{k+1}}=v, a_{j_{l+1}}=w$. Using the induction hypothesis, the right hand side of (3) becomes
\begin{equation}
\sigma(\cdots w v u \cdots) + \sigma(\cdots [v w] u \cdots) + \sigma(\cdots v [u w] \cdots) + \sigma(\cdots [u v] w \cdots)
\end{equation}
Similarly, if we start with the pair $(j_l, j_{l+1})$, i.e., with $\sigma (\cdots u w v \cdots) + \sigma (\cdots u [v w] \cdots)$, we can wind up with
\begin{equation}
\sigma(\cdots w v u \cdots) + \sigma(\cdots w [u v]\cdots) + \sigma(\cdots [u w] v \cdots) + \sigma(\cdots u [v w] \cdots)
\end{equation}
So we must show that $\sigma$ applied to
\begin{multline}
(\cdots [v w] u \cdots) - (\cdots u [v w]\cdots) + (\cdots v [u w] \cdots) - \\
(\cdots [u w] v \cdots)+ (\cdots [u v] w \cdots) - (\cdots w [u v]\cdots),
\end{multline}
a linear combination of monomials of degree $n-1$, gives 0.
But from the properties of $\sigma$ on all monomials of degree less than or equal to $n-1$ in $U(\g)$, if $(\cdots x y \cdots)$ is a monomial of degree less than or equal to $n-1$,
\begin{equation}
\sigma(\cdots x y \cdots) - \sigma(\cdots y x \cdots) - \sigma(\cdots [x y] \cdots) = 0.
\end{equation}
Hence $\sigma$ applied to (8) gives
\begin{equation}
(\cdots [[v w] u] \cdots) + (\cdots [v [u w]]\cdots) + (\cdots [[u v] w] \cdots)
\end{equation}
which is zero by the Jacobi identity and linearity of $\sigma$. Thus $\sigma$ is well defined on monomials of degree less than or equal to $n$, as well, and we extend $\sigma$ linearly to the space spanned by all monomials of degree less than or equal to $n$ in $U(\g)$. In this way, $\sigma$ is defined on all of $U(\g)$, and clearly, the image of the ordered monomials in $U(\g)$ is linearly independent in $\B$, as they are in bijection with the generators of $\B$. Thus the proof is completed.
\end{proof}
{\bf Remark.} Any representation $\pi$ of a Lie algebra $\g$ in a vector space $V$ extends to a representation $U(\g) \rightarrow \mathrm{End}\ V$ (a homomorphism of associative algebras).
{\bf Definition.} Given a representation $\pi$ of $\g$ on $V$, $V$ can be considered a $\g$-module by defining a bilinear map $\g \times V \rightarrow V$ sending $(g, v)$ to $\pi (g) v$, written $g\cdot v$. Thus the defining property of a $\g$-module is: $[a,b]v=a(b v) - b(a v)$. By a homomorphism of $\g$-modules we mean a linear map $\varphi : V_1 \rightarrow V_2$ such that $\varphi(gv) = g \varphi(v)$. An isomorphism is a homomorphism $\varphi$ which is bijective.
Let $\g$ be a finite dimensional Lie algebra with a fixed non-degenerate invariant symmetric bilinear form $(\cdot,\cdot)$. Choose a basis $\{u_i\}$ of $\g$ and let $\{v_i\}$ be the dual basis, which means $(u_i, v_j)= \delta_{ij}$.
{\bf Definition.} The Casimir operator $\Omega = \sum_i u_i v_i \in U(\g)$.
\begin{exercise}
$\Omega$ is independent of the choice of the basis $\{u_i\}$.
\end{exercise}
\begin{proof}[Solution]
Take another basis $\{u_i'\}$ with dual basis $\{v_i'\}$. Since $\{u_i\}$ was a basis, we can write $u_i' = \sum_j a_{ij} u_j$. In this basis, $\Omega' = \sum_i u_i' v_i'$.
Note that by definition, $(u_i', v_k') = \delta_{ik} = (\sum_j a_{ij} u_j, v_k') = \sum_j a_{ij} (u_j, v_k')$. Let the matrix $A = (a_{ij})$, and $B = ((u_j, v_k'))$. Clearly, $AB=I$. Now consider $I=BA$, i.e., $\delta_{ji} = \sum_k (u_j, v_k') a_{ki} = (u_j, \sum_k a_{ki} v_k')$. But since $v_i$ is the unique vector such that $(u_j, v_i) = \delta_{ji}$ for all $j$, it follows that $v_i = \sum_k a_{ki} v_k'$.
Finally, $\Omega' = \sum_i u_i' v_i' = \sum_i ( \sum_j a_{ij} u_j) v_i' = \sum_{j} u_j \bigl( \sum_i a_{ij} v_i' \bigr)$. By the result of the previous paragraph, $\sum_i a_{ij} v_i' = v_j$, so this equals $\sum_j u_j v_j$, which gives the desired result, $\Omega' = \sum_j u_j v_j = \Omega$.
\end{proof}
{\bf Lemma 1.}
(on dual bases)
For $a\in\g$ write $[a,u_i]=\sum_j a_{ij}u_j$ and $[a,v_i]=\sum_j
b_{ij}v_j$. Then $a_{ij}=-b_{ji}$ (under the above assumptions).
\begin{proof}
We have: $([a,u_i],v_k)=\sum_j a_{ij}(u_j,v_k)=a_{ik}$ and similarly
$([a,v_i],u_k)=b_{ik}$. Hence, by invariance of the form, $a_{ik} =(a,[u_i,v_k])$ and $b_{ik}
=(a,[v_i,u_k])$. Therefore $a_{ik}=-b_{ki}$.
\end{proof}
{\bf Definition.} Let $V$ be a $\g$-module, where $\g$ is a Lie
algebra. A 1-cocycle is a linear map $\varphi:\g\rightarrow V$ such that
$\varphi([a,b])= a\varphi(b)-b\varphi(a)$. The space of 1-cocycles is
denoted $Z(\g,V)$.
{\bf Example of a 1-cocycle.} The trivial 1-cocycle associated to
$v\in V$ is $\varphi_v(a)=a\cdot v$.
\begin{exercise}
Check that $\varphi_v$ is a 1-cocycle.
\end{exercise}
\begin{proof}[Solution]
$\varphi_v([a,b])= [a,b]v= a(bv)-b(av)= a\varphi_v(b)-b\varphi_v(a)$
\end{proof}
The trivial 1-cocycles form a subspace $B(\g,V)$ of $Z(\g,V)$. Let
$\HgV=Z(\g,V)/B(\g,V)$.
{\bf Main Theorem on Cohomology.} If $\g$ is a semi-simple Lie algebra
over an algebraically closed field of characteristic 0, and $V$ is a
finite dimensional $\g$-module, then $\HgV=0$, i.e., every 1-cocycle
is trivial.
\begin{exercise}
$H^1(\g,V_1\oplus V_2)= H^1(\g,V_1)\oplus H^1(\g,V_2)$ where
$V_1\oplus V_2$ denotes the direct sum of $\g$-modules $V_1$ and
$V_2$.
\end{exercise}
\begin{proof}[Solution]
First we show that $Z(\g,V_1\oplus V_2)= Z(\g,V_1)\oplus Z(\g,V_2)$.
It is clear that $Z(\g,V_1\oplus V_2)\supset Z(\g,V_1)\oplus
Z(\g,V_2)$. Furthermore, every 1-cocycle $\varphi\in Z(\g,V_1\oplus
V_2)$ can be decomposed as
$\pi_1\circ\varphi\oplus\pi_2\circ\varphi\in Z(\g,V_1)\oplus
Z(\g,V_2)$.
It is also clear that $B(\g,V_1\oplus V_2)= B(\g,V_1)\oplus B(\g,V_2)$
since $\varphi_{v_1\oplus v_2}= \varphi_{v_1}\oplus\varphi_{v_2}$.
Therefore $H^1(\g,V_1\oplus V_2)= Z(\g,V_1\oplus V_2)/B(\g,V_1\oplus
V_2)= Z(\g,V_1)\oplus Z(\g,V_2)/B(\g,V_1)\oplus B(\g,V_2)=
Z(\g,V_1)/B(\g,V_1)\oplus Z(\g,V_2)/B(\g,V_2)= H^1(\g,V_1)\oplus
H^1(\g,V_2)$.
\end{proof}
{\bf Lemma 2.} In the situation of Lemma 1, let $V$ be a $\g$-module
and $\varphi:\g\rightarrow V$ a 1-cocycle. Then for any $a\in\g$ we have:
$a\sum_i u_i\varphi(v_i)= \Omega\varphi(a)$.
\begin{proof}
$a\sum_i u_i\varphi(v_i)= \sum_i [a,u_i]\varphi(v_i)+ \sum_i
u_ia\varphi(v_i)= \sum_{i,j}a_{ij}u_j\varphi(v_i)+ \sum_i
u_ia\varphi(v_i)= \sum_j u_j \varphi\bigl(\sum_i a_{ij}v_i\bigr)+ \sum_i
u_ia\varphi(v_i)$. Now by Lemma 1, $\sum_i a_{ij}v_i=-\sum_i b_{ji}v_i=-[a,v_j]$,
so $a\sum_i u_i\varphi(v_i)= -\sum_j u_j\varphi([a,v_j])+ \sum_j
u_ja\varphi(v_j)= \sum_j u_j(a\varphi(v_j)-\varphi([a,v_j]))$, which by
the cocycle property equals $\sum_j u_jv_j\varphi(a)= \Omega\varphi(a)$.
\end{proof}
{\bf Corollary.} $\g$ commutes with $\Omega$, i.e., in any $\g$-module
$a(\Omega v)=\Omega(av)$ for all $a\in\g$, $v\in V$.
\begin{proof}
Apply Lemma 2 to the trivial cocycle $\varphi_v(a)=a\cdot v$:
$a(\Omega v)= a\sum_i u_i\varphi_v(v_i)= \Omega\varphi_v(a)= \Omega(av)$.
\end{proof}
\begin{proof}[Proof of the Main Theorem on Cohomology]
The proof is by induction on the dimension of the $\g$-module $V$.
First note that we may assume that $V$ is faithful, that is, $aV=0$
implies $a=0$ for $a\in\g$. Indeed let $\g_0=\{a\in\g|aV=0\}$. This
is an ideal of $\g$. Hence $\g_0$ and $\g/\g_0$ are again semi-simple
Lie algebras. In particular $[\g_0,\g_0]=\g_0$. Let $\varphi$ be a
1-cocycle of $\g$ in $V$, i.e., $\varphi([a,b])=
a\varphi(b)-b\varphi(a)$. If $a, b\in\g_0$, we get $\varphi([a,b])=
0$. So $\varphi([\g_0,\g_0])=0$, and since $[\g_0,\g_0]=\g_0$, $\varphi(\g_0)= 0$.
Hence $\varphi$ induces a 1-cocycle $\g/\g_0\rightarrow V$, so we may replace $\g$ by $\g/\g_0$.
We want to apply lemma 2.
Take $(a,b)=\tr_V ab$. It is non-degenerate since $\g$ is
semi-simple and $V$ is faithful. Let $\{u_i\}$ be a basis of $\g$, $\{v_i\}$ the dual
basis, and $\Omega=\sum_i u_iv_i$ the Casimir operator. We decompose
$V=V_0\oplus V_1$, where $V_0$ is the generalized eigenspace of
$\Omega$ attached to $0$ and $V_1$ is the sum of all the other
generalized eigenspaces. By the corollary $V_0$ and $V_1$ are
$\g$-invariant. So by Exercise 22.3 $\HgV=
H^1(\g,V_0)\oplus H^1(\g,V_1)$. If $V_0$ and $V_1$ are both nonzero,
by the induction hypothesis $H^1(\g,V_0)=0$ and $H^1(\g,V_1)=0$ and so
$\HgV=0$. Hence we may assume $V=V_0$ or $V=V_1$.
Case 1: $V=V_1$. So $\Omega$ is invertible. Let $v=\sum_i
u_i\varphi(v_i)$. Lemma 2 now states that $a(v)= \Omega\varphi(a)$.
Hence $\varphi(a)= \Omega^{-1}a(v)= a(\Omega^{-1}v)$. So
$\varphi=\varphi_{\Omega^{-1}v}$ is a trivial cocycle.
Case 2: $V=V_0$. So $\Omega$ is a nilpotent operator. Hence
$\tr_V(\Omega)=0$, but $\tr_V(\Omega)= \tr_V\sum_i u_iv_i=
\sum_i(u_i,v_i)=\dim\g$. So $\g=0$ and $\HgV=0$.
\end{proof}
\end{document}
% LocalWords: cocycle cocycles cohomology Casmir eigenspace eigenspaces