From 5c5475055fc58d902ebdc442142f306947aca200 Mon Sep 17 00:00:00 2001
From: Konrad Völkel <konrad.voelkel@hhu.de>
Date: Fri, 19 Jul 2024 12:25:50 +0200
Subject: [PATCH] add missing minus sign in differential entropy formula

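A quick numerical sanity check of the corrected definition, as a minimal Python sketch (assuming numpy and scipy are available; the helper name `differential_entropy` is illustrative, not from the repository): for the standard normal density, $-\int f(x) \log f(x) dx$ matches the known closed form $\frac{1}{2}\log(2\pi e)$.

```python
# Minimal sketch (assumption: numpy and scipy are installed; the helper
# `differential_entropy` below is illustrative, not part of the repo).
import numpy as np
from scipy.integrate import quad
from scipy.stats import norm

def differential_entropy(pdf, lo, hi):
    """h(X) = -integral of f(x) log f(x) dx, via numerical quadrature."""
    value, _ = quad(lambda x: pdf(x) * np.log(pdf(x)), lo, hi)
    return -value  # the leading minus sign this patch restores

h_numeric = differential_entropy(norm.pdf, -10.0, 10.0)  # tails beyond +-10 are negligible
h_exact = 0.5 * np.log(2 * np.pi * np.e)                 # closed form, approx. 1.4189
assert np.isclose(h_numeric, h_exact)
```
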
---
 expectation-maximization.ipynb | 2 +-
 relative-entropie.md           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/expectation-maximization.ipynb b/expectation-maximization.ipynb
index 021dc4e..9bbad51 100644
--- a/expectation-maximization.ipynb
+++ b/expectation-maximization.ipynb
@@ -5656,7 +5656,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.10"
+   "version": "3.9.2"
   }
  },
  "nbformat": 4,
diff --git a/relative-entropie.md b/relative-entropie.md
index 7e75cb6..c044703 100644
--- a/relative-entropie.md
+++ b/relative-entropie.md
@@ -302,7 +302,7 @@ Let $P$ be a continuous distribution with density function $f$ that on a set $\Om
 Then
 
 $$
-h(X) := \mathbb{E}\left( -\log(f(X)) \right) = \int_\Omega f(x) \log f(x) dx
+h(X) := \mathbb{E}\left( -\log(f(X)) \right) = - \int_\Omega f(x) \log f(x) dx
 $$
 
 is called the *differential entropy* of $X$ (likewise defined by Claude Shannon).
-- 
GitLab