
make data types more robust
Thomas Morris committed Apr 26, 2024
1 parent a43f7f3 commit c1d776a
Showing 14 changed files with 107 additions and 279 deletions.
10 changes: 6 additions & 4 deletions src/blop/agent.py
@@ -204,7 +204,7 @@ def sample(self, n: int = DEFAULT_MAX_SAMPLES, method: str = "quasi-random") ->
else:
raise ValueError("'method' argument must be one of ['quasi-random', 'random', 'grid'].")

-        return self.dofs(active=True).untransform(X)
+        return self.dofs(active=True).untransform(X).double()

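A minimal sketch (toy tensors, not blop's internals) of why the explicit cast matters: BoTorch models and acquisition functions work in float64, so a float32 sample batch must be cast at the boundary before it reaches them.

```python
# Sketch under assumed dtypes: quasi-random draws often come back as float32.
import torch

X = torch.rand(8, 3)  # imagine these are untransformed samples in float32
X = X.double()        # the explicit cast added in this commit

assert X.dtype == torch.float64  # what BoTorch expects downstream
```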
def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_kwargs):
"""Ask the agent for the best point to sample, given an acquisition function.
@@ -257,8 +257,8 @@ def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_k
# we may pick up some more kwargs
acqf, acqf_kwargs = _construct_acqf(self, acqf_name=acqf_config["name"], **acqf_kwargs)

-        NUM_RESTARTS = 16
-        RAW_SAMPLES = 1024
+        NUM_RESTARTS = 8
+        RAW_SAMPLES = 256

candidates, acqf_obj = botorch.optim.optimize_acqf(
acq_function=acqf,
@@ -267,6 +267,7 @@ def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_k
sequential=sequential,
num_restarts=NUM_RESTARTS,
            raw_samples=RAW_SAMPLES,  # used for initialization heuristic
+            fixed_features={i: dof._transform(dof.readback) for i, dof in enumerate(active_dofs) if dof.read_only},
)

# this includes both RO and non-RO DOFs.
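The `fixed_features` argument is a standard `optimize_acqf` option that maps input indices to pinned values, so read-only DOFs stay at their (transformed) readback while the free DOFs are optimized. A self-contained sketch with a toy GP, not blop's agent:

```python
# Sketch: pin input index 2 (say, a read-only DOF) at 0.5 during optimization.
import torch
from botorch.acquisition import qExpectedImprovement
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from botorch.optim import optimize_acqf
from gpytorch.mlls import ExactMarginalLogLikelihood

train_X = torch.rand(16, 3, dtype=torch.double)
train_Y = -(train_X.sum(dim=-1, keepdim=True) - 1.5) ** 2  # toy objective

model = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))

acqf = qExpectedImprovement(model, best_f=train_Y.max())
bounds = torch.stack([torch.zeros(3), torch.ones(3)]).double()

candidate, _ = optimize_acqf(
    acq_function=acqf,
    bounds=bounds,
    q=1,
    num_restarts=8,
    raw_samples=256,
    fixed_features={2: 0.5},  # index -> pinned (transformed) value
)
assert torch.isclose(candidate[0, 2], torch.tensor(0.5, dtype=torch.double))
```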
@@ -604,8 +605,9 @@ def scalarized_fitnesses(self, weights="default", constrained=True):
fitness_objs = self.objectives(kind="fitness")
if len(fitness_objs) >= 1:
f = self.fitness_scalarization(weights=weights).evaluate(self.train_targets(active=True, kind="fitness"))
+            f = torch.where(f.isnan(), -np.inf, f)  # remove all NaNs
        else:
-            f = torch.zeros(len(self.table), dtype=torch.double)
+            f = torch.zeros(len(self.table), dtype=torch.double)  # if there are no fitnesses, use a constant dummy fitness
if constrained:
# how many constraints are satisfied?
c = self.evaluated_constraints.sum(axis=-1)
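Why `-np.inf` rather than leaving the NaNs: comparisons against NaN are undefined, so one failed observation would poison any argmax or sort over the scalarized fitness, while `-inf` simply guarantees the failed point is never selected. A standalone sketch of the pattern:

```python
# Sketch of the NaN-masking pattern added above.
import numpy as np
import torch

f = torch.tensor([1.0, float("nan"), 3.0], dtype=torch.double)
f = torch.where(f.isnan(), -np.inf, f)

print(f.argmax())  # tensor(2): the failed point can never win the argmax
```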
6 changes: 4 additions & 2 deletions src/blop/bayesian/kernels.py
@@ -165,8 +165,10 @@ def forward(self, x1, x2, diag=False, **params):
# adapted from the Matern kernel
mean = x1.reshape(-1, x1.size(-1)).mean(0)[(None,) * (x1.dim() - 1)]

-        trans_x1 = torch.matmul(self.latent_transform.unsqueeze(1), (x1 - mean).unsqueeze(-1)).squeeze(-1)
-        trans_x2 = torch.matmul(self.latent_transform.unsqueeze(1), (x2 - mean).unsqueeze(-1)).squeeze(-1)
+        transform = self.latent_transform.unsqueeze(1)
+
+        trans_x1 = torch.matmul(transform, (x1 - mean).unsqueeze(-1)).squeeze(-1)
+        trans_x2 = torch.matmul(transform, (x2 - mean).unsqueeze(-1)).squeeze(-1)

distance = self.covar_dist(trans_x1, trans_x2, diag=diag, **params)

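Hoisting the shared `unsqueeze` makes the broadcast structure easier to see. A shape sketch under assumed dimensions (a single non-batched `(d, d)` latent transform, which is not necessarily how blop batches it):

```python
# Sketch: broadcast one (d, d) transform over n centered points.
import torch

d, n = 3, 5
latent_transform = torch.eye(d)            # (d, d), hypothetical shape
x = torch.rand(n, d)
mean = x.mean(0)

transform = latent_transform.unsqueeze(0)  # (1, d, d), broadcasts over points
trans_x = torch.matmul(transform, (x - mean).unsqueeze(-1)).squeeze(-1)

assert trans_x.shape == (n, d)  # each point mapped through the transform
```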
File renamed without changes.
89 changes: 89 additions & 0 deletions src/blop/digestion/tests.py
@@ -0,0 +1,89 @@
import numpy as np
import pandas as pd

from ..utils import functions


def himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
"""
Digests Himmelblau's function into the feedback.
"""
    for index, entry in df.iterrows():
        # default any missing DOF to zero so that partial inputs still digest
        if not hasattr(entry, "x1"):
            df.loc[index, "x1"] = x1 = 0
        else:
            x1 = entry.x1
        if not hasattr(entry, "x2"):
            df.loc[index, "x2"] = x2 = 0
        else:
            x2 = entry.x2
        df.loc[index, "himmelblau"] = functions.himmelblau(x1=x1, x2=x2)
        df.loc[index, "himmelblau_transpose"] = functions.himmelblau(x1=x2, x2=x1)

return df
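A usage sketch: the digestion takes a DataFrame of DOF readbacks and appends objective columns, assuming `functions.himmelblau` is the standard Himmelblau function.

```python
import pandas as pd
from blop.digestion.tests import himmelblau_digestion

df = pd.DataFrame({"x1": [3.0, 0.0], "x2": [2.0, 0.0]})
df = himmelblau_digestion(df)

print(df["himmelblau"].tolist())  # [0.0, 170.0]: (3, 2) is a global minimum
```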


def constrained_himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
"""
    Digests Himmelblau's function into the feedback, masking with NaN any point more than 6 units from the origin.
"""

df = himmelblau_digestion(df)
    df.loc[:, "himmelblau"] = np.where(df.x1.values**2 + df.x2.values**2 < 36, df.himmelblau.values, np.nan)

return df


def sketchy_himmelblau_digestion(df: pd.DataFrame, p=0.1) -> pd.DataFrame:
"""
    Evaluates the constrained Himmelblau function, where every point fails (is NaN) with probability p.
"""

df = constrained_himmelblau_digestion(df)
bad = np.random.choice(a=[True, False], size=len(df), p=[p, 1 - p])
df.loc[:, "himmelblau"] = np.where(bad, np.nan, df.himmelblau.values)

return df
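A usage sketch of the failure injection, with a seeded RNG so the failure pattern is reproducible:

```python
import numpy as np
import pandas as pd
from blop.digestion.tests import sketchy_himmelblau_digestion

np.random.seed(0)  # fix the failure pattern for reproducibility
df = pd.DataFrame({"x1": np.zeros(1000), "x2": np.zeros(1000)})
df = sketchy_himmelblau_digestion(df, p=0.5)

print(df["himmelblau"].isna().mean())  # ~0.5, the injected failure rate
```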


"""
Chankong and Haimes function from https://en.wikipedia.org/wiki/Test_functions_for_optimization
"""


def chankong_and_haimes_digestion(df):
for index, entry in df.iterrows():
df.loc[index, "f1"] = (entry.x1 - 2) ** 2 + (entry.x2 - 1) + 2
df.loc[index, "f2"] = 9 * entry.x1 - (entry.x2 - 1) + 2
df.loc[index, "c1"] = entry.x1**2 + entry.x2**2
df.loc[index, "c2"] = entry.x1 - 3 * entry.x2 + 10

return df
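A quick sanity check against the reference definition, f1 = 2 + (x1 - 2)^2 + (x2 - 1)^2 and f2 = 9*x1 - (x2 - 1)^2, from the Wikipedia page cited in the docstring:

```python
import pandas as pd
from blop.digestion.tests import chankong_and_haimes_digestion

df = pd.DataFrame({"x1": [2.0], "x2": [1.0]})
df = chankong_and_haimes_digestion(df)

assert df.loc[0, "f1"] == 2.0   # f1 attains its minimum of 2 at (2, 1)
assert df.loc[0, "f2"] == 18.0  # 9 * 2 - (1 - 1) ** 2
```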


def mock_kbs_digestion(df: pd.DataFrame) -> pd.DataFrame:
"""
    Digests a simulated beam width and height into the feedback.
"""

for index, entry in df.iterrows():
sigma_x = functions.gaussian_beam_waist(entry.x1, entry.x2)
sigma_y = functions.gaussian_beam_waist(entry.x3, entry.x4)

df.loc[index, "x_width"] = 2 * sigma_x
df.loc[index, "y_width"] = 2 * sigma_y

return df


def binh_korn_digestion(df: pd.DataFrame) -> pd.DataFrame:
"""
    Digests the Binh and Korn function into the feedback.
"""

for index, entry in df.iterrows():
f1, f2 = functions.binh_korn(entry.x1, entry.x2)
df.loc[index, "f1"] = f1
df.loc[index, "f2"] = f2

return df
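A usage sketch, assuming `functions.binh_korn` follows the standard definition f1 = 4*x1^2 + 4*x2^2 and f2 = (x1 - 5)^2 + (x2 - 5)^2:

```python
import pandas as pd
from blop.digestion.tests import binh_korn_digestion

df = pd.DataFrame({"x1": [0.0], "x2": [0.0]})
df = binh_korn_digestion(df)

print(df.loc[0, ["f1", "f2"]].tolist())  # [0.0, 50.0] at the origin
```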
Empty file removed src/blop/experiments/__init__.py
Empty file.
Empty file.
Empty file removed src/blop/experiments/atf/atf.py
Empty file.
Empty file.
46 changes: 0 additions & 46 deletions src/blop/experiments/nsls2/iss.py

This file was deleted.

96 changes: 0 additions & 96 deletions src/blop/experiments/nsls2/tes.py

This file was deleted.

Empty file.
49 changes: 0 additions & 49 deletions src/blop/experiments/sirepo/tes.py

This file was deleted.
