Merge pull request PrincetonUniversity#2592 from kmantel/gradopt
GradientOptimization: stop error on default bounds
kmantel authored Jan 27, 2023
2 parents 938de00 + 93b5411 commit 2e55a2e
Showing 1 changed file with 4 additions and 2 deletions.
@@ -1189,6 +1189,8 @@ def reset(self, default_variable=None, objective_function=None, context=None, **
 
         if self.owner:
             owner_str = ' of {self.owner.name}'
+        else:
+            owner_str = ''
 
         # Get bounds from search_space if it has any non-None entries
         if any(i is not None for i in self.search_space):
@@ -1243,9 +1245,9 @@ def reset(self, default_variable=None, objective_function=None, context=None, **
             # Array specified for upper bound, so replace any None's with +inf
             upper = np.array([[float('inf')] if n[0] is None else n for n in upper.reshape(sample_len,1)])
 
-            if not all(lower<upper):
+            if not all(lower <= upper):
                 raise OptimizationFunctionError(f"Specification of {repr(BOUNDS)} arg ({bounds}) for {self.name}"
-                                                f"{owner_str} resulted in lower >= corresponding upper for one or "
+                                                f"{owner_str} resulted in lower > corresponding upper for one or "
                                                 f"more elements (lower: {lower.tolist()}; uuper: {upper.tolist()}).")
 
             bounds = (lower,upper)
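The two hunks address separate failure modes: the new else branch guarantees owner_str is defined even when the function has no owner (it is only assigned in the if branch but is referenced when the error message is built), and relaxing the comparison from < to <= stops the check from raising when a lower bound equals its corresponding upper bound, which appears to be the situation the title's "default bounds" produce. A minimal standalone sketch of the comparison change, using plain NumPy and made-up bound values rather than the PsyNeuLink code itself:

import numpy as np

# Column vectors shaped like the reset() code's reshape(sample_len, 1) arrays;
# the first element's lower bound equals its upper bound.
lower = np.array([[0.0], [float('-inf')]])
upper = np.array([[0.0], [float('inf')]])

print(all(lower < upper))    # False -> the old strict check would raise OptimizationFunctionError
print(all(lower <= upper))   # True  -> the relaxed check accepts equal lower/upper bounds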
