@@ -21,7 +21,7 @@ def test_config(self):
     def test_single_step(self):
         optimizer = SGD(learning_rate=0.5)
         self.assertEqual(len(optimizer.variables), 2)
-        grads = np.array([1.0, 6.0, 7.0, 2.0])
+        grads = ops.array([1.0, 6.0, 7.0, 2.0])
         vars = backend.Variable([1.0, 2.0, 3.0, 4.0])
         optimizer.build([vars])
         optimizer.apply_gradients(zip([grads], [vars]))
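Note on this hunk: swapping np.array for ops.array presumably makes the test's gradients backend-native tensors rather than NumPy ndarrays. A minimal sketch of the difference, assuming Keras 3's keras.ops namespace, which dispatches to whichever backend is active (TensorFlow, JAX, Torch, or NumPy):

    import numpy as np
    from keras import ops

    g_np = np.array([1.0, 6.0, 7.0, 2.0])    # always a NumPy ndarray
    g_ops = ops.array([1.0, 6.0, 7.0, 2.0])  # a tensor of the active backend,
                                             # e.g. a tf.Tensor under TensorFlow

With ops.array, apply_gradients receives the same tensor type it would see in real training code, whichever backend the suite runs under.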
@@ -32,7 +32,7 @@ def test_single_step(self):
 
     def test_weight_decay(self):
         grads, var1, var2, var3 = (
-            np.zeros(()),
+            ops.zeros(()),
             backend.Variable(2.0),
             backend.Variable(2.0, name="exclude"),
             backend.Variable(2.0),
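The weight-decay hunk makes the same np-to-ops swap for the scalar zero gradient. The variable named "exclude" suggests the test checks that decay can be skipped by name. A hypothetical illustration (not the test's exact body), assuming Keras 3's decoupled weight decay and its exclude_from_weight_decay(var_names=...) API, and mirroring the test's backend.Variable usage:

    from keras import backend, ops
    from keras.optimizers import SGD

    optimizer = SGD(learning_rate=1.0, weight_decay=0.004)
    decayed = backend.Variable(2.0)
    kept = backend.Variable(2.0, name="exclude")
    optimizer.exclude_from_weight_decay(var_names=["exclude"])

    # With zero gradients the update is pure decay:
    # decayed -> 2.0 * (1 - 1.0 * 0.004), while kept stays at 2.0.
    zero = ops.zeros(())
    optimizer.apply_gradients([(zero, decayed), (zero, kept)])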
@@ -56,8 +56,8 @@ def test_correctness_with_golden(self):
         optimizer = SGD(nesterov=True)
 
         x = backend.Variable(np.ones([10]))
-        grads = np.arange(0.1, 1.1, 0.1)
-        first_grads = np.full((10,), 0.01)
+        grads = ops.arange(0.1, 1.1, 0.1)
+        first_grads = ops.full((10,), 0.01)
 
         # fmt: off
         golden = np.array(
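The golden-value test (the golden array itself is elided in this view) applies one small first step and then repeated larger gradients, comparing x against precomputed values. For reference, a plain-NumPy sketch of the TF/Keras-style Nesterov update such golden values typically encode, assuming SGD's default learning rate of 0.01; the iteration count is illustrative, and with the default momentum of 0 the update degenerates to vanilla SGD:

    import numpy as np

    def sgd_nesterov_step(w, v, g, lr=0.01, momentum=0.0):
        # Velocity is refreshed first, then the parameter moves along
        # the look-ahead direction momentum * v - lr * g.
        v = momentum * v - lr * g
        w = w + momentum * v - lr * g
        return w, v

    w, v = np.ones(10), np.zeros(10)
    w, v = sgd_nesterov_step(w, v, np.full((10,), 0.01))  # first_grads step
    for _ in range(5):  # illustrative number of steps
        w, v = sgd_nesterov_step(w, v, np.arange(0.1, 1.1, 0.1))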