Skip to content

Commit 8e7928b

Browse files
committed
WIP: fix functional docs
1 parent f8fcef7 commit 8e7928b

File tree

1 file changed

+88
-55
lines changed

1 file changed

+88
-55
lines changed

odl/solvers/functional/default_functionals.py

Lines changed: 88 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@
4545
#TODO:
4646
# - Add some documentation to `proximal`, `gradient` and `convex_conj`
4747
# (use See Also when applicable, otherwise a full doc)
48-
# - Unify citations
4948

5049

5150
class LpNorm(Functional):
@@ -1222,7 +1221,7 @@ def convex_conj(self):
12221221

12231222
@property
12241223
def proximal(self):
1225-
"""Return the proximal factory of the functional.
1224+
"""A proximal factory for this functional.
12261225
12271226
This is the zero operator.
12281227
"""
@@ -1256,37 +1255,44 @@ def __repr__(self):
12561255
allow_mixed_seps=False)
12571256

12581257

1259-
#TODO: continue here
1260-
12611258
class KullbackLeibler(Functional):
12621259

12631260
r"""The Kullback-Leibler divergence functional.
12641261
12651262
Notes
12661263
-----
1267-
The functional :math:`F` with prior :math:`g>=0` is given by:
1264+
The Kullback-Leibler divergence with prior :math:`g>=0` is defined as
12681265
12691266
.. math::
1270-
F(x)
1271-
=
1267+
\text{KL}(x)
1268+
&=
12721269
\begin{cases}
1273-
\sum_{i} \left( x_i - g_i + g_i \log \left( \frac{g_i}{x_i}
1274-
\right) \right) & \text{if } x_i > 0 \forall i
1270+
\sum_{i} \left( x_i - g_i + g_i \ln \left( \frac{g_i}{x_i}
1271+
\right) \right) & \text{if } x_i > 0 \text{ for all } i,
12751272
\\
1276-
+\infty & \text{else.}
1273+
+\infty & \text{otherwise.}
12771274
\end{cases}
1275+
\quad (\mathbb{R}^n\text{-like space}) \\[2ex]
1276+
\text{KL}(x)
1277+
&=
1278+
\begin{cases}
1279+
\int \left(
1280+
x(t) - g(t) + g(t) \ln\left(\frac{g(t)}{x(t)}\right)
1281+
\right)\, \mathrm{d}t & \text{if } x(t) > 0 \text{ for all } t,
1282+
\\
1283+
+\infty & \text{otherwise.}
1284+
\end{cases}
1285+
\quad (L^p\text{-like space})
12781286
1279-
Note that we use the common definition 0 log(0) := 0.
1280-
KL based objectives are common in MLEM optimization problems and are often
1287+
Note that we use the common convention :math:`0 \ln 0 := 0`.
1288+
KL-based objectives are common in MLEM optimization problems and are often
12811289
used as data-matching term when data noise governed by a multivariate
12821290
Poisson probability distribution is significant.
12831291
1284-
The functional is related to the Kullback-Leibler cross entropy functional
1285-
`KullbackLeiblerCrossEntropy`. The KL cross entropy is the one
1286-
diescribed in `this Wikipedia article
1287-
<https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence>`_, and
1288-
the functional :math:`F` is obtained by switching place of the prior and
1289-
the varialbe in the KL cross entropy functional.
1292+
This functional is related to the `KullbackLeiblerCrossEntropy`
1293+
described in `this Wikipedia article
1294+
<https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence>`_,
1295+
in that the roles of the variable :math:`x` and the prior :math:`g` are flipped.
12901296
12911297
For a theoretical exposition, see `[Csiszar1991]
12921298
<http://www.jstor.org/stable/2241918>`_.
@@ -1340,23 +1346,23 @@ def prior(self):
13401346
def _call(self, x):
13411347
"""Return ``self(x)``.
13421348
1343-
If any components of ``x`` is non-positive, the value is positive
1349+
If any component of ``x`` is non-positive, the value is positive
13441350
infinity.
13451351
"""
13461352
# Lazy import to improve `import odl` time
13471353
import scipy.special
13481354

13491355
if self.prior is None:
1350-
tmp = ((x - 1 - np.log(x)).inner(self.domain.one()))
1356+
integral = (x - 1 - np.log(x)).inner(self.domain.one())
13511357
else:
1352-
tmp = ((x - self.prior +
1353-
scipy.special.xlogy(self.prior, self.prior / x))
1354-
.inner(self.domain.one()))
1355-
if np.isnan(tmp):
1358+
integrand = (x - self.prior +
1359+
scipy.special.xlogy(self.prior, self.prior / x))
1360+
integral = integrand.inner(self.domain.one())
1361+
if np.isnan(integral):
13561362
# In this case, some element was less than or equal to zero
13571363
return np.inf
13581364
else:
1359-
return tmp
1365+
return integral
13601366

13611367
@property
13621368
def gradient(self):
@@ -1365,7 +1371,7 @@ def gradient(self):
13651371
For a prior :math:`g` is given by
13661372
13671373
.. math::
1368-
\nabla F(x) = 1 - \frac{g}{x}.
1374+
\nabla \text{KL}(x) = 1 - \frac{g}{x}.
13691375
13701376
The gradient is not defined if any component of :math:`x` is
13711377
non-positive.
@@ -1392,7 +1398,7 @@ def _call(self, x):
13921398

13931399
@property
13941400
def proximal(self):
1395-
"""Return the `proximal factory` of the functional.
1401+
"""A `proximal factory` for this functional.
13961402
13971403
See Also
13981404
--------
@@ -1401,12 +1407,17 @@ def proximal(self):
14011407
odl.solvers.nonsmooth.proximal_operators.proximal_convex_conj :
14021408
Proximal of the convex conjugate of a functional.
14031409
"""
1404-
return proximal_convex_conj(proximal_convex_conj_kl(space=self.domain,
1405-
g=self.prior))
1410+
return proximal_convex_conj(
1411+
proximal_convex_conj_kl(space=self.domain, g=self.prior))
14061412

14071413
@property
14081414
def convex_conj(self):
1409-
"""The convex conjugate functional of the KL-functional."""
1415+
"""The convex conjugate of the KL functional.
1416+
1417+
See Also
1418+
--------
1419+
KullbackLeiblerConvexConj
1420+
"""
14101421
return KullbackLeiblerConvexConj(self.domain, self.prior)
14111422

14121423
def __repr__(self):
@@ -1429,24 +1440,38 @@ def __repr__(self):
14291440

14301441
class KullbackLeiblerConvexConj(Functional):
14311442

1432-
r"""The convex conjugate of Kullback-Leibler divergence functional.
1443+
r"""The convex conjugate of the Kullback-Leibler divergence functional.
14331444
14341445
Notes
14351446
-----
1436-
The functional :math:`F^*` with prior :math:`g > 0` is given by
1447+
The convex conjugate :math:`\text{KL}^*` of the KL divergence is given
1448+
by
1449+
1450+
(with prior :math:`g \geq 0`):
14371451
14381452
.. math::
1439-
F^*(x) =
1453+
\text{KL}^*(x)
1454+
&=
14401455
\begin{cases}
1441-
\sum_{i} \left( -g_i \ln(1 - x_i) \right)
1442-
& \text{if } x_i < 1 \forall i
1456+
\sum_{i} \left( -g_i \ln(1 - x_i) \right) & \text{if }
1457+
x_i < 1 \text{ for all } i,
14431458
\\
1444-
+\infty & \text{else}
1459+
+\infty & \text{otherwise.}
14451460
\end{cases}
1461+
\quad (\mathbb{R}^n\text{-like space}) \\[2ex]
1462+
\text{KL}^*(x)
1463+
&=
1464+
\begin{cases}
1465+
\int \left(-g(t)\ln\big(1 - x(t)\big)
1466+
\right)\, \mathrm{d}t & \text{if } x(t) < 1 \text{ for all } t,
1467+
\\
1468+
+\infty & \text{otherwise.}
1469+
\end{cases}
1470+
\quad (L^p\text{-like space})
14461471
14471472
See Also
14481473
--------
1449-
KullbackLeibler : convex conjugate functional
1474+
KullbackLeibler : convex conjugate
14501475
"""
14511476

14521477
def __init__(self, space, prior=None):
@@ -1472,37 +1497,42 @@ def __init__(self, space, prior=None):
14721497

14731498
@property
14741499
def prior(self):
1475-
"""The prior in convex conjugate Kullback-Leibler functional."""
1500+
"""The prior in the convex conjugate of the KL functional."""
14761501
return self.__prior
14771502

14781503
# TODO(#440): use integration operator when available
14791504
def _call(self, x):
14801505
"""Return ``self(x)``.
14811506
1482-
If any components of ``x`` is larger than or equal to 1, the value is
1507+
If any component of ``x`` is larger than or equal to 1, the value is
14831508
positive infinity.
14841509
"""
14851510
# Lazy import to improve `import odl` time
14861511
import scipy.special
14871512

14881513
if self.prior is None:
1489-
tmp = self.domain.element(
1490-
-1.0 * (np.log(1 - x))).inner(self.domain.one())
1514+
integral = (-1.0 * (np.log1p(-x))).inner(self.domain.one())
14911515
else:
1492-
tmp = self.domain.element(-scipy.special.xlogy(
1493-
self.prior, 1 - x)).inner(self.domain.one())
1494-
if np.isnan(tmp):
1516+
integrand = -scipy.special.xlog1py(self.prior, -x)
1517+
integral = integrand.inner(self.domain.one())
1518+
if np.isnan(integral):
14951519
# In this case, some element was larger than or equal to one
14961520
return np.inf
14971521
else:
1498-
return tmp
1522+
return integral
14991523

15001524
@property
15011525
def gradient(self):
1502-
"""Gradient operator of the functional.
1526+
"""Gradient operator of this functional.
15031527
1504-
The gradient is not defined in points where one or more components
1505-
are larger than or equal to one.
1528+
The gradient of the convex conjugate of the KL divergence is given
1529+
by
1530+
1531+
.. math::
1532+
\nabla \text{KL}^*(x) = \frac{g}{1 - x}.
1533+
1534+
The gradient is not defined in points where any component of :math:`x`
1535+
is (larger than or) equal to one.
15061536
"""
15071537
functional = self
15081538

@@ -1530,7 +1560,7 @@ def _call(self, x):
15301560

15311561
@property
15321562
def proximal(self):
1533-
"""Return the `proximal factory` of the functional.
1563+
"""A `proximal factory` for this functional.
15341564
15351565
See Also
15361566
--------
@@ -1543,7 +1573,10 @@ def proximal(self):
15431573

15441574
@property
15451575
def convex_conj(self):
1546-
"""The convex conjugate functional of the conjugate KL-functional."""
1576+
"""The convex conjugate functional of the KL convex conjugate.
1577+
1578+
This is the original KL divergence.
1579+
"""
15471580
return KullbackLeibler(self.domain, self.prior)
15481581

15491582
def __repr__(self):
@@ -1556,6 +1589,7 @@ def __repr__(self):
15561589
allow_mixed_seps=False)
15571590

15581591

1592+
#TODO: continue here
15591593
class KullbackLeiblerCrossEntropy(Functional):
15601594

15611595
r"""The Kullback-Leibler Cross Entropy divergence functional.
@@ -1568,7 +1602,7 @@ class KullbackLeiblerCrossEntropy(Functional):
15681602
F(x)
15691603
=
15701604
\begin{cases}
1571-
\sum_{i} \left( g_i - x_i + x_i \log \left( \frac{x_i}{g_i}
1605+
\sum_{i} \left( g_i - x_i + x_i \ln \left( \frac{x_i}{g_i}
15721606
\right) \right)
15731607
& \text{if } g_i > 0 \forall i
15741608
\\
@@ -1638,7 +1672,7 @@ def prior(self):
16381672
def _call(self, x):
16391673
"""Return ``self(x)``.
16401674
1641-
If any components of ``x`` is non-positive, the value is positive
1675+
If any component of ``x`` is non-positive, the value is positive
16421676
infinity.
16431677
"""
16441678
# Lazy import to improve `import odl` time
@@ -2204,7 +2238,7 @@ def __repr__(self):
22042238

22052239
class NuclearNorm(Functional):
22062240

2207-
r"""Nuclear norm for matrix valued functions.
2241+
r"""Nuclear norm for matrix-valued functions.
22082242
22092243
Notes
22102244
-----
@@ -2294,8 +2328,7 @@ def _asvector(self, arr):
22942328

22952329
def _call(self, x):
22962330
"""Return ``self(x)``."""
2297-
2298-
# Convert to array with most
2331+
# Convert to array with "outer" indices last
22992332
arr = self._asarray(x)
23002333
svd_diag = np.linalg.svd(arr, compute_uv=False)
23012334

0 commit comments

Comments
 (0)