@@ -2476,11 +2476,11 @@ class BregmanDistance(Functional):
     Notes
     -----
     Given a functional :math:`f`, which has a (sub)gradient :math:`\partial f`,
-    and given a point :math:`y`, the Bregman distance functional :math:`D_f`
-    in a point :math:`x` is given by
+    and given a point :math:`y`, the Bregman distance functional
+    :math:`D_f(\cdot, y)` in a point :math:`x` is given by

     .. math::
-        D_f(x) = f(x) - f(y) - \langle \partial f(y), x - y \rangle.
+        D_f(x, y) = f(x) - f(y) - \langle \partial f(y), x - y \rangle.


     References
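As a side note on the formula above, here is a minimal NumPy sketch (illustration only, not part of the diff), assuming f(x) = ||x||_2^2 with gradient 2x; for this choice the Bregman distance reduces to the shifted squared norm ||x - y||_2^2, which is also what the updated doctest below checks on a discretized space:

import numpy as np

def f(x):
    # Squared Euclidean norm: f(x) = ||x||_2^2
    return np.dot(x, x)

def grad_f(x):
    # Gradient of the squared norm: grad f(x) = 2 * x
    return 2 * x

def bregman(x, y):
    # D_f(x, y) = f(x) - f(y) - <grad f(y), x - y>
    return f(x) - f(y) - np.dot(grad_f(y), x - y)

x = np.array([1.0, 2.0, 3.0])
y = np.array([0.5, 0.5, 0.5])

# For f(x) = ||x||_2^2 the Bregman distance equals ||x - y||_2^2.
assert np.isclose(bregman(x, y), np.dot(x - y, x - y))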
@@ -2503,7 +2503,7 @@ def __init__(self, functional, point, subgradient_op=None):
             optional argument `subgradient_op` is not given, the functional
             needs to implement `functional.gradient`.
         point : element of ``functional.domain``
-            The point from which to define the Bregman distance
+            The point from which to define the Bregman distance.
         subgradient_op : `Operator`, optional
             The operator that takes an element in `functional.domain` and
             returns a subgradient of the functional in that point.
@@ -2513,15 +2513,16 @@ def __init__(self, functional, point, subgradient_op=None):
         --------
         Example of initializing the Bregman distance functional:

-        >>> space = odl.uniform_discr(0, 2, 14)
+        >>> space = odl.uniform_discr(0, 1, 10)
         >>> l2_squared = odl.solvers.L2NormSquared(space)
         >>> point = space.one()
         >>> Bregman_dist = odl.solvers.BregmanDistance(l2_squared, point)

-        This is gives the shifted L2 norm squared ||x - 1||:
+        This gives the shifted L2 norm squared ||x - 1||^2:

-        >>> Bregman_dist(space.zero())
-        2.0
+        >>> expected_value = l2_squared(space.one())
+        >>> Bregman_dist(space.zero()) == expected_value
+        True
         """
         if not isinstance(functional, Functional):
             raise TypeError('`functional` {} not an instance of ``Functional``'
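For the updated doctest above: on ``uniform_discr(0, 1, 10)``, ``l2_squared(space.one())`` is the integral of 1 over [0, 1], i.e. 1.0, and the Bregman distance at zero is f(0) - f(1) - <2 * 1, 0 - 1> = 0 - 1 + 2 = 1.0, so the comparison evaluates to ``True``.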
@@ -2545,16 +2546,16 @@ def __init__(self, functional, point, subgradient_op=None):
                                 ''.format(functional))
         else:
             # Check that given subgradient is an operator that maps from the
-            # domain of the functional to the domain of the functional
+            # domain of the functional to itself
             if not isinstance(subgradient_op, Operator):
                 raise TypeError('`subgradient_op` {} is not an instance of '
                                 '``Operator``'.format(subgradient_op))
-            if not self.__functional.domain == subgradient_op.domain:
+            if not subgradient_op.domain == self.__functional.domain:
                 raise ValueError('`functional.domain` {} is not the same as '
                                  '`subgradient_op.domain` {}'
                                  ''.format(self.__functional.domain,
                                            subgradient_op.domain))
-            if not self.__functional.domain == subgradient_op.range:
+            if not subgradient_op.range == self.__functional.domain:
                 raise ValueError('`functional.domain` {} is not the same as '
                                  '`subgradient_op.range` {}'
                                  ''.format(self.__functional.domain,
@@ -2568,7 +2569,8 @@ def __init__(self, functional, point, subgradient_op=None):

         super(BregmanDistance, self).__init__(
             space=functional.domain, linear=False,
-            grad_lipschitz=self.__functional.grad_lipschitz)
+            grad_lipschitz=(self.__functional.grad_lipschitz +
+                            self.__subgrad_eval.norm()))

     @property
     def functional(self):
@@ -2606,11 +2608,9 @@ def gradient(self):

     def __repr__(self):
         '''Return ``repr(self)``.'''
-        return '{}({!r}, {!r}, {!r}, {!r})'.format(self.__class__.__name__,
-                                                   self.domain,
-                                                   self.functional,
-                                                   self.point,
-                                                   self.subgradient_op)
+        return '{}({!r}, {!r}, {!r})'.format(self.__class__.__name__,
+                                             self.functional, self.point,
+                                             self.subgradient_op)


 if __name__ == '__main__':
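For reference, a hypothetical usage sketch of the optional ``subgradient_op`` argument whose domain and range are checked above (assuming only the API visible in this diff; the explicit use of ``L2NormSquared.gradient`` as the subgradient operator is illustrative):

import odl

space = odl.uniform_discr(0, 1, 10)
l2_squared = odl.solvers.L2NormSquared(space)
point = space.one()

# An explicit (sub)gradient operator; it must map `functional.domain`
# to itself, which is what the domain/range checks enforce.
subgrad = l2_squared.gradient

bregman_dist = odl.solvers.BregmanDistance(l2_squared, point,
                                           subgradient_op=subgrad)

# Same value as relying on `functional.gradient` internally:
# D_f(0, 1) = 0 - ||1||^2 - <2 * 1, 0 - 1> = 1.0 on this space.
print(bregman_dist(space.zero()))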