
Commit 7d9c5e7

add test optim
1 parent f8e822c

File tree

1 file changed (+65, -0 lines)


test/test_optim.py

Lines changed: 65 additions & 0 deletions
@@ -0,0 +1,65 @@
import ot
import numpy as np

# import pytest


def test_conditional_gradient():

    n = 100  # nb bins

    # bin positions
    x = np.arange(n, dtype=np.float64)

    # Gaussian distributions
    a = ot.datasets.get_1D_gauss(n, m=20, s=5)  # m= mean, s= std
    b = ot.datasets.get_1D_gauss(n, m=60, s=10)

    # loss matrix
    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    def f(G):
        return 0.5 * np.sum(G**2)

    def df(G):
        return G

    reg = 1e-1

    G, log = ot.optim.cg(a, b, M, reg, f, df, verbose=True, log=True)

    assert np.allclose(a, G.sum(1))
    assert np.allclose(b, G.sum(0))


def test_generalized_conditional_gradient():

    n = 100  # nb bins

    # bin positions
    x = np.arange(n, dtype=np.float64)

    # Gaussian distributions
    a = ot.datasets.get_1D_gauss(n, m=20, s=5)  # m= mean, s= std
    b = ot.datasets.get_1D_gauss(n, m=60, s=10)

    # loss matrix
    M = ot.dist(x.reshape((n, 1)), x.reshape((n, 1)))
    M /= M.max()

    def f(G):
        return 0.5 * np.sum(G**2)

    def df(G):
        return G

    reg1 = 1e-3
    reg2 = 1e-1

    G, log = ot.optim.gcg(a, b, M, reg1, reg2, f, df, verbose=True, log=True)

    assert np.allclose(a, G.sum(1), atol=1e-05)
    assert np.allclose(b, G.sum(0), atol=1e-05)
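For reference, ot.optim.cg and ot.optim.gcg are POT's conditional gradient (Frank-Wolfe) solvers for regularized optimal transport. The sketch below of the objectives these two tests exercise is based on how they are called here, with f(G) = 0.5 * ||G||_F^2 as defined in the tests, and should be read as an assumption rather than the library's definitive specification:

\min_{G \in U(a,b)} \; \langle G, M \rangle_F + \mathrm{reg}\, f(G)    (ot.optim.cg)

\min_{G \in U(a,b)} \; \langle G, M \rangle_F + \mathrm{reg1}\, \Omega_e(G) + \mathrm{reg2}\, f(G)    (ot.optim.gcg)

where U(a,b) = \{ G \ge 0 : G\mathbf{1} = a,\ G^\top\mathbf{1} = b \} is the transport polytope and \Omega_e denotes an entropic regularization term. The asserts at the end of each test then check exactly the two marginal constraints of U(a,b): G.sum(1) should recover a and G.sum(0) should recover b (with an explicit tolerance in the gcg case).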
