
removed plot kwarg, added end_time kwarg, expanded the test
Peter230655 committed Feb 28, 2025
1 parent 73e52bb commit d89338b
Showing 2 changed files with 171 additions and 55 deletions.
14 changes: 6 additions & 8 deletions opty/direct_collocation.py
@@ -685,14 +685,15 @@ def parse_free(self, free):

return parse_free(free, n, q, N, variable_duration)

def create_linear_initial_guess(self, plot=False):
def create_linear_initial_guess(self, end_time=1.0):
"""Creates an initial guess that is the linear interpolation between
exact instance constraints. Please see the notes for more information.
Parameters
----------
plot : bool, optional (default=False)
If True, the initial guess will be plotted.
end_time : float, optional (default=1.0)
If the time interval is variable, this is the assumed duration of the simulation.
Returns
-------
@@ -904,16 +905,13 @@ def create_linear_initial_guess(self, plot=False):
initial_guess[-1] = wert

if self.bounds is None:
initial_guess[-1] = 1.0 / (num_nodes-1)
initial_guess[-1] = end_time / (num_nodes-1)

elif self.collocator.node_time_interval not in self.bounds.keys():
initial_guess[-1] = 1.0 / (num_nodes-1)
initial_guess[-1] = end_time / (num_nodes-1)
else:
pass

if plot:
self.plot_trajectories(initial_guess)

return initial_guess


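For reference, here is a minimal usage sketch of the new end_time keyword (a hypothetical toy problem, not taken from the repository; it assumes a simple double-integrator system and mirrors the constructor call pattern used in the tests below):

import numpy as np
import sympy as sym
import sympy.physics.mechanics as me
from opty.direct_collocation import Problem

t = me.dynamicsymbols._t
x, v, u = me.dynamicsymbols('x v u')
h = sym.symbols('h')  # variable node time interval
num_nodes = 51

# double integrator: x' = v, v' = u
eom = sym.Matrix([x.diff(t) - v, v.diff(t) - u])

def obj(free):
    # minimize the integral of u**2; h is the last entry of free
    return free[-1]*np.sum(free[2*num_nodes:3*num_nodes]**2)

def obj_grad(free):
    grad = np.zeros_like(free)
    grad[2*num_nodes:3*num_nodes] = 2.0*free[-1]*free[2*num_nodes:3*num_nodes]
    grad[-1] = np.sum(free[2*num_nodes:3*num_nodes]**2)
    return grad

instance_constraints = (
    x.func(0.0),
    v.func(0.0),
    x.func((num_nodes - 1)*h) - 1.0,
    v.func((num_nodes - 1)*h),
)

prob = Problem(obj, obj_grad, eom, (x, v), num_nodes, h,
               instance_constraints=instance_constraints,
               time_symbol=t)

# With no bound on h, end_time sets the assumed duration, so the guess for
# h is end_time/(num_nodes - 1); the states are linearly interpolated
# between the exact instance constraints and everything else is zero.
initial_guess = prob.create_linear_initial_guess(end_time=2.0)

# The plot kwarg is gone; plot the guess explicitly if needed:
# prob.plot_trajectories(initial_guess)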
212 changes: 165 additions & 47 deletions opty/tests/test_direct_collocation.py
@@ -1948,7 +1948,7 @@ def test_linear_initial_guess(plot=False):
num_nodes = 61

# A: CONSTANT TIME INTERVAL
# A!: normal
# A0: no bounds, no instance constraints
t0, t1, t2, tf = 0.0, 2.0, 4.0, 6.0
interval_value = tf/(num_nodes - 1)

Expand All @@ -1961,6 +1961,24 @@ def obj_grad(free):
grad[0: 2*num_nodes] = 2.0*free[0:2*num_nodes]*interval_value
return grad

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
time_symbol=t,
backend='numpy',
)

expected_guess = np.zeros(prob.num_free)
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

# A1: instance constraints, no bounds

instance_constraints = (
x.func(t0),
y.func(t0) - 2.0,
@@ -1978,19 +1996,6 @@ def obj_grad(free):
ux.func(tf),
)

bounds= {
a1: (-1.0, 10.0),
a2: (-1.0, 12.0),

u1: (-10.0, 1.0),
u2: (-1.0, 10.0),

x: (-5.0, 5.0 ),
ux: (-1.0, 1.0),
y: (-4.0, 4.0),
uy: (-1.0, 1.0),
}

prob = Problem(
obj,
obj_grad,
@@ -2000,8 +2005,8 @@ def obj_grad(free):
interval_value,
known_parameter_map=par_map,
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
backend='numpy',
)

# Set the expected initial guess
@@ -2041,6 +2046,48 @@ def obj_grad(free):
# uy - guess
expected_guess[3*num_nodes:4*num_nodes] = 4.5
# u1 - guess
expected_guess[4*num_nodes:5*num_nodes] = 0.0
# u2 - guess
expected_guess[5*num_nodes:6*num_nodes] = 0.0
# a1 - guess
expected_guess[6*num_nodes] = 0.0
# a2 - guess
expected_guess[6*num_nodes+1] = 0.0

initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

# A2: with bounds

bounds = {
a1: (-1.0, 10.0),
a2: (-1.0, 12.0),

u1: (-10.0, 1.0),
u2: (-1.0, 10.0),

x: (-5.0, 5.0 ),
ux: (-1.0, 1.0),
y: (-4.0, 4.0),
uy: (-1.0, 1.0),
}

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
backend='numpy',
)

# Set new expected guesses as bounds are present: bounded unknowns default to the midpoint of their bounds.
# u1 - guess
expected_guess[4*num_nodes:5*num_nodes] = -9.0/2.0
# u2 - guess
expected_guess[5*num_nodes:6*num_nodes] = 9.0/2.0
@@ -2052,7 +2099,7 @@ def obj_grad(free):
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

# A2: np.inf, -np.inf in bounds
# A3: np.inf, -np.inf in bounds
bounds[a1] = (-np.inf, 10.0)
bounds[a2] = (-10.0, np.inf)

Expand All @@ -2067,6 +2114,7 @@ def obj_grad(free):
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
backend='numpy',
)

expected_guess[6*num_nodes] = 10.0
@@ -2075,7 +2123,7 @@ def obj_grad(free):
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

# A3: no bounds
# A4: no bounds
expected_guess[4*num_nodes: 5*num_nodes] = 0.0
expected_guess[5*num_nodes: 6*num_nodes] = 0.0
expected_guess[6*num_nodes] = 0.0
@@ -2091,12 +2139,14 @@ def obj_grad(free):
known_parameter_map=par_map,
instance_constraints=instance_constraints,
time_symbol=t,
backend='numpy',
)

initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

# A4: state instances in instance_constraints

# A5: state instances in instance_constraints
instance_constraints = (
x.func(t0) - 3.0 + ux.func(tf),
y.func(t0) - 2.0,
@@ -2128,16 +2178,19 @@ def obj_grad(free):
known_parameter_map=par_map,
instance_constraints=instance_constraints,
time_symbol=t,
backend='numpy',
)

initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)


# ========================================================================
# B: VARIABLE TIME INTERVAL
# B1: normal
# B0: no bounds, no instance constraints
h = sym.symbols('h')
t00, t10, t20, tf0 = 0.0, int(num_nodes/3)*h, int(2*num_nodes/3)*h, (num_nodes - 1)*h
t00, t10, t20 = 0.0, int(num_nodes/3)*h, int(2*num_nodes/3)*h
tf0 = (num_nodes - 1)*h
interval_value = h

def obj(free):
Expand All @@ -2149,6 +2202,28 @@ def obj_grad(free):
grad[0: 2*num_nodes] = 2.0*free[0:2*num_nodes]*free[-1]
return grad

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
time_symbol=t,
backend='numpy',
)

expected_guess = np.zeros(prob.num_free)
expected_guess[-1] = 1.0 / (num_nodes-1)
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

initial_guess = prob.create_linear_initial_guess(end_time=2.0)
expected_guess[-1] = 2.0 / (num_nodes-1)
np.testing.assert_allclose(initial_guess, expected_guess)

# B1: instance constraints, no bounds
instance_constraints = (
x.func(t00),
y.func(t00),
@@ -2166,33 +2241,6 @@ def obj_grad(free):
ux.func(tf0) + 5.0,
)

bounds= {
a1: (-1.0, 10.0),
a2: (-1.0, 12.0),

u1: (-10.0, 1.0),
u2: (-1.0, 10.0),

x: (-5.0, 5.0 ),
ux: (-1.0, 1.0),
y: (-4.0, 4.0),
uy: (-1.0, 1.0),
h: (1.0, 2.0),
}

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
)

# Set the expected initial guess
expected_guess = np.zeros(prob.num_free)
t0, t1, t2 = int(0.0), int(num_nodes/3), int(2*num_nodes/3)
@@ -2232,6 +2280,68 @@ def obj_grad(free):
# uy - guess
expected_guess[3*num_nodes:4*num_nodes] = 4.5
# u1 - guess
expected_guess[4*num_nodes:5*num_nodes] = 0
# u2 - guess
expected_guess[5*num_nodes:6*num_nodes] = 0
# a1 - guess
expected_guess[6*num_nodes] = 0
# a2 - guess
expected_guess[6*num_nodes+1] = 0
# h - guess
expected_guess[-1] = 1.0 / (num_nodes-1)

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
instance_constraints=instance_constraints,
time_symbol=t,
backend='numpy',
)

initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

initial_guess = prob.create_linear_initial_guess(end_time=3.0)
expected_guess[-1] = 3.0 / (num_nodes-1)
np.testing.assert_allclose(initial_guess, expected_guess)


# B3: with bounds
bounds = {
a1: (-1.0, 10.0),
a2: (-1.0, 12.0),

u1: (-10.0, 1.0),
u2: (-1.0, 10.0),

x: (-5.0, 5.0 ),
ux: (-1.0, 1.0),
y: (-4.0, 4.0),
uy: (-1.0, 1.0),
h: (1.0, 2.0),
}

prob = Problem(
obj,
obj_grad,
eom,
state_symbols,
num_nodes,
interval_value,
known_parameter_map=par_map,
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
backend='numpy',
)

# Set new expected guesses as bounds are present: bounded unknowns default to the midpoint of their bounds.
# u1 - guess
expected_guess[4*num_nodes:5*num_nodes] = -9.0/2.0
# u2 - guess
expected_guess[5*num_nodes:6*num_nodes] = 9.0/2.0
@@ -2245,6 +2355,11 @@ def obj_grad(free):
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)

initial_guess = prob.create_linear_initial_guess(end_time=4.0)
# As h is bounded, end_time has no effect; the guess is the midpoint of the bounds, (1.0 + 2.0)/2 = 1.5.
expected_guess[-1] = 1.5
np.testing.assert_allclose(initial_guess, expected_guess)

# B2: np.inf, -np.inf in bounds
bounds[a1] = (-np.inf, 10.0)
bounds[a2] = (-10.0, np.inf)
@@ -2260,6 +2375,7 @@ def obj_grad(free):
instance_constraints=instance_constraints,
bounds=bounds,
time_symbol=t,
backend='numpy',
)

expected_guess[6*num_nodes] = 10.0
@@ -2285,6 +2401,7 @@ def obj_grad(free):
known_parameter_map=par_map,
instance_constraints=instance_constraints,
time_symbol=t,
backend='numpy',
)
initial_guess = prob.create_linear_initial_guess()
np.testing.assert_allclose(initial_guess, expected_guess)
@@ -2321,6 +2438,7 @@ def obj_grad(free):
known_parameter_map=par_map,
instance_constraints=instance_constraints,
time_symbol=t,
backend='numpy',
)

initial_guess = prob.create_linear_initial_guess()
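To make the expectations above easier to follow, here is a small sketch (the helper is hypothetical, not part of opty) reproducing the arithmetic behind the expected guesses for the time interval and the bounded unknowns:

def interval_guess(h_bounds, end_time, num_nodes):
    # Hypothetical helper mirroring the behaviour the tests expect:
    # midpoint of the bounds if h is bounded, otherwise
    # end_time/(num_nodes - 1).
    if h_bounds is None:
        return end_time/(num_nodes - 1)
    return 0.5*(h_bounds[0] + h_bounds[1])

num_nodes = 61

# B0/B1: no bound on h, so end_time (default 1.0) sets the guess.
assert interval_guess(None, 1.0, num_nodes) == 1.0/60
assert interval_guess(None, 3.0, num_nodes) == 3.0/60

# B3: h bounded to (1.0, 2.0), so end_time is ignored and the midpoint wins.
assert interval_guess((1.0, 2.0), 4.0, num_nodes) == 1.5

# Bounded trajectories and parameters also default to the midpoint of their
# bounds, e.g. u1 with bounds (-10.0, 1.0) and u2 with bounds (-1.0, 10.0).
assert 0.5*(-10.0 + 1.0) == -9.0/2.0
assert 0.5*(-1.0 + 10.0) == 9.0/2.0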
