@@ -1,6 +1,7 @@
 import numpy as np
 
 from keras_core import backend
+from keras_core import ops
 from keras_core import testing
 from keras_core.optimizers.adadelta import Adadelta
 
@@ -16,7 +17,7 @@ def test_config(self):
 
     def test_single_step(self):
         optimizer = Adadelta(learning_rate=0.5)
-        grads = np.array([1.0, 6.0, 7.0, 2.0])
+        grads = ops.array([1.0, 6.0, 7.0, 2.0])
         vars = backend.Variable([1.0, 2.0, 3.0, 4.0])
         optimizer.apply_gradients(zip([grads], [vars]))
         self.assertAllClose(
@@ -25,7 +26,7 @@ def test_single_step(self):
 
     def test_weight_decay(self):
         grads, var1, var2, var3 = (
-            np.zeros(()),
+            ops.zeros(()),
             backend.Variable(2.0),
             backend.Variable(2.0, name="exclude"),
             backend.Variable(2.0),
@@ -49,8 +50,8 @@ def test_correctness_with_golden(self):
         optimizer = Adadelta(learning_rate=1.0, rho=0.8, epsilon=1e-6)
 
         x = backend.Variable(np.ones([10]))
-        grads = np.arange(0.1, 1.1, 0.1)
-        first_grads = np.full((10,), 0.01)
+        grads = ops.arange(0.1, 1.1, 0.1)
+        first_grads = ops.full((10,), 0.01)
 
         golden = np.tile(
             [[0.9978], [0.9947], [0.9915], [0.9882], [0.9849]], (1, 10)
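For context, a minimal standalone sketch (not part of the diff) of the pattern this change adopts: gradients are built through the backend-agnostic keras_core.ops namespace instead of raw NumPy before being fed to the optimizer. The gradient and variable values are copied from the test above; the variable names used here (variable, row, scalar) are purely illustrative.

import numpy as np

from keras_core import backend
from keras_core import ops
from keras_core.optimizers.adadelta import Adadelta

# Build the gradient with keras_core.ops so it is a tensor of the active
# backend (TensorFlow, JAX, or torch) rather than a plain NumPy array.
grads = ops.array([1.0, 6.0, 7.0, 2.0])
variable = backend.Variable([1.0, 2.0, 3.0, 4.0])

# Apply one Adadelta update; apply_gradients takes (gradient, variable) pairs.
optimizer = Adadelta(learning_rate=0.5)
optimizer.apply_gradients(zip([grads], [variable]))

# ops.arange / ops.full / ops.zeros follow the same pattern in the other tests,
# while NumPy (np.ones, np.tile) is still fine for host-side reference values.
row = ops.arange(0.1, 1.1, 0.1)
first_grads = ops.full((10,), 0.01)
scalar = ops.zeros(())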