diff --git a/src/blop/agent.py b/src/blop/agent.py
index 18a81d8..49c69a0 100644
--- a/src/blop/agent.py
+++ b/src/blop/agent.py
@@ -204,7 +204,7 @@ def sample(self, n: int = DEFAULT_MAX_SAMPLES, method: str = "quasi-random") ->
         else:
             raise ValueError("'method' argument must be one of ['quasi-random', 'random', 'grid'].")

-        return self.dofs(active=True).untransform(X)
+        return self.dofs(active=True).untransform(X).double()

     def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_kwargs):
         """Ask the agent for the best point to sample, given an acquisition function.
@@ -257,8 +257,8 @@ def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_k
             # we may pick up some more kwargs
             acqf, acqf_kwargs = _construct_acqf(self, acqf_name=acqf_config["name"], **acqf_kwargs)

-            NUM_RESTARTS = 16
-            RAW_SAMPLES = 1024
+            NUM_RESTARTS = 8
+            RAW_SAMPLES = 256

            candidates, acqf_obj = botorch.optim.optimize_acqf(
                acq_function=acqf,
@@ -267,6 +267,7 @@ def ask(self, acqf="qei", n=1, route=True, sequential=True, upsample=1, **acqf_k
                sequential=sequential,
                num_restarts=NUM_RESTARTS,
                raw_samples=RAW_SAMPLES,  # used for initialization heuristic
+                fixed_features={i: dof._transform(dof.readback) for i, dof in enumerate(active_dofs) if dof.read_only},
            )

            # this includes both RO and non-RO DOFs.
@@ -604,8 +605,9 @@ def scalarized_fitnesses(self, weights="default", constrained=True):
         fitness_objs = self.objectives(kind="fitness")
         if len(fitness_objs) >= 1:
             f = self.fitness_scalarization(weights=weights).evaluate(self.train_targets(active=True, kind="fitness"))
+            f = torch.where(f.isnan(), -np.inf, f)  # remove all NaNs
         else:
-            f = torch.zeros(len(self.table), dtype=torch.double)
+            f = torch.zeros(len(self.table), dtype=torch.double)  # if there are no fitnesses, use a constant dummy fitness
         if constrained:
             # how many constraints are satisfied?
             c = self.evaluated_constraints.sum(axis=-1)
diff --git a/src/blop/bayesian/kernels.py b/src/blop/bayesian/kernels.py
index ecc7d5c..b847469 100644
--- a/src/blop/bayesian/kernels.py
+++ b/src/blop/bayesian/kernels.py
@@ -165,8 +165,10 @@ def forward(self, x1, x2, diag=False, **params):
         # adapted from the Matern kernel
         mean = x1.reshape(-1, x1.size(-1)).mean(0)[(None,) * (x1.dim() - 1)]

-        trans_x1 = torch.matmul(self.latent_transform.unsqueeze(1), (x1 - mean).unsqueeze(-1)).squeeze(-1)
-        trans_x2 = torch.matmul(self.latent_transform.unsqueeze(1), (x2 - mean).unsqueeze(-1)).squeeze(-1)
+        transform = self.latent_transform.unsqueeze(1)
+
+        trans_x1 = torch.matmul(transform, (x1 - mean).unsqueeze(-1)).squeeze(-1)
+        trans_x2 = torch.matmul(transform, (x2 - mean).unsqueeze(-1)).squeeze(-1)

         distance = self.covar_dist(trans_x1, trans_x2, diag=diag, **params)
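A note on the `fixed_features` argument added above: BoTorch's `optimize_acqf` accepts a dict mapping input dimension indices to values that are held constant while the acquisition function is optimized, which is how read-only DOFs get pinned at their transformed readbacks. Below is a minimal, self-contained sketch of the same mechanism on a toy model; the training data, the model, and the pinned value 0.3 are illustrative stand-ins for the agent's fitted GP and DOF transforms.

```python
import torch
from botorch.acquisition import ExpectedImprovement
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from botorch.optim import optimize_acqf
from gpytorch.mlls import ExactMarginalLogLikelihood

# toy 2D problem on the unit square; dimension 1 plays the role of a
# read-only DOF whose transformed readback happens to be 0.3
train_X = torch.rand(16, 2, dtype=torch.double)
train_Y = -(train_X - 0.5).pow(2).sum(dim=-1, keepdim=True)

model = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))

candidate, value = optimize_acqf(
    acq_function=ExpectedImprovement(model, best_f=train_Y.max()),
    bounds=torch.tensor([[0.0, 0.0], [1.0, 1.0]], dtype=torch.double),
    q=1,
    num_restarts=8,  # the cheaper optimizer settings from this diff
    raw_samples=256,
    fixed_features={1: 0.3},  # dimension index -> pinned value
)
print(candidate)  # column 1 of the candidate is exactly 0.3
```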
diff --git a/src/blop/digestion.py b/src/blop/digestion/__init__.py
similarity index 100%
rename from src/blop/digestion.py
rename to src/blop/digestion/__init__.py
diff --git a/src/blop/digestion/tests.py b/src/blop/digestion/tests.py
new file mode 100644
index 0000000..f194769
--- /dev/null
+++ b/src/blop/digestion/tests.py
@@ -0,0 +1,89 @@
+import numpy as np
+import pandas as pd
+
+from ..utils import functions
+
+
+def himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Digests Himmelblau's function into the feedback.
+    """
+    for index, entry in df.iterrows():
+        if not hasattr(entry, "x1"):
+            df.loc[index, "x1"] = x1 = 0
+        else:
+            x1 = entry.x1
+        if not hasattr(entry, "x2"):
+            df.loc[index, "x2"] = x2 = 0
+        else:
+            x2 = entry.x2
+        df.loc[index, "himmelblau"] = functions.himmelblau(x1=x1, x2=x2)
+        df.loc[index, "himmelblau_transpose"] = functions.himmelblau(x1=x2, x2=x1)
+
+    return df
+
+
+def constrained_himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Digests Himmelblau's function into the feedback, constrained with NaN for a distance of more than 6 from the origin.
+    """
+
+    df = himmelblau_digestion(df)
+    df.loc[:, "himmelblau"] = np.where(df.x1.values**2 + df.x2.values**2 < 36, df.himmelblau.values, np.nan)
+
+    return df
+
+
+def sketchy_himmelblau_digestion(df: pd.DataFrame, p: float = 0.1) -> pd.DataFrame:
+    """
+    Evaluates the constrained Himmelblau, where every point is bad with probability p.
+    """
+
+    df = constrained_himmelblau_digestion(df)
+    bad = np.random.choice(a=[True, False], size=len(df), p=[p, 1 - p])
+    df.loc[:, "himmelblau"] = np.where(bad, np.nan, df.himmelblau.values)
+
+    return df
+
+
+"""
+Chankong and Haimes function from https://en.wikipedia.org/wiki/Test_functions_for_optimization
+"""
+
+
+def chankong_and_haimes_digestion(df: pd.DataFrame) -> pd.DataFrame:
+    for index, entry in df.iterrows():
+        df.loc[index, "f1"] = (entry.x1 - 2) ** 2 + (entry.x2 - 1) + 2
+        df.loc[index, "f2"] = 9 * entry.x1 - (entry.x2 - 1) + 2
+        df.loc[index, "c1"] = entry.x1**2 + entry.x2**2
+        df.loc[index, "c2"] = entry.x1 - 3 * entry.x2 + 10
+
+    return df
+
+
+def mock_kbs_digestion(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Digests a beam waist and height into the feedback.
+    """
+
+    for index, entry in df.iterrows():
+        sigma_x = functions.gaussian_beam_waist(entry.x1, entry.x2)
+        sigma_y = functions.gaussian_beam_waist(entry.x3, entry.x4)
+
+        df.loc[index, "x_width"] = 2 * sigma_x
+        df.loc[index, "y_width"] = 2 * sigma_y
+
+    return df
+
+
+def binh_korn_digestion(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Digests the Binh and Korn function into the feedback.
+    """
+
+    for index, entry in df.iterrows():
+        f1, f2 = functions.binh_korn(entry.x1, entry.x2)
+        df.loc[index, "f1"] = f1
+        df.loc[index, "f2"] = f2
+
+    return df
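As a quick illustration of what these digestion helpers return, here is a hypothetical round trip, assuming the `blop.digestion.tests` layout introduced by this diff:

```python
import pandas as pd

from blop.digestion.tests import constrained_himmelblau_digestion

# (3, 2) is a global minimum of Himmelblau's function; (6, 6) lies more
# than 6 from the origin, so the constrained digestion NaNs it out
df = pd.DataFrame({"x1": [3.0, 6.0], "x2": [2.0, 6.0]})
df = constrained_himmelblau_digestion(df)
print(df[["x1", "x2", "himmelblau"]])  # himmelblau ~ 0.0 at (3, 2), NaN at (6, 6)
```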
diff --git a/src/blop/experiments/__init__.py b/src/blop/experiments/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/src/blop/experiments/atf/__init__.py b/src/blop/experiments/atf/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/src/blop/experiments/atf/atf.py b/src/blop/experiments/atf/atf.py
deleted file mode 100644
index e69de29..0000000
diff --git a/src/blop/experiments/nsls2/__init__.py b/src/blop/experiments/nsls2/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/src/blop/experiments/nsls2/iss.py b/src/blop/experiments/nsls2/iss.py
deleted file mode 100644
index 6a76a98..0000000
--- a/src/blop/experiments/nsls2/iss.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import bluesky.plan_stubs as bps
-import bluesky.plans as bp
-import numpy as np
-
-from blop.tasks import Task
-
-
-def initialize():
-    yield from bps.null()  # do nothing
-
-
-class MaxBeamFlux(Task):
-    name = "max_flux"
-
-    def get_fitness(processed_entry):
-        return getattr(processed_entry, "flux")
-
-
-def acquisition(dofs, inputs, dets):
-    uid = yield from bp.list_scan(dets, *[_ for items in zip(dofs, np.atleast_2d(inputs).T) for _ in items])
-    return uid
-
-
-def flux_digestion(db, uid):
-    """
-    This method eats the output of a Bluesky scan, and returns a dict with inputs for the tasks
-    """
-
-    table = db[uid].table(fill=True)
-
-    products_keys = [
-        "image",
-        "vertical_extent",
-        "horizontal_extent",
-        "flux",
-        "x_pos",
-        "y_pos",
-        "x_width",
-        "y_width",
-    ]
-    products = {key: [] for key in products_keys}
-
-    for index, entry in table.iterrows():
-        products["apb_ch4"].append(getattr(entry, "apb_ch4"))
-
-    return products
diff --git a/src/blop/experiments/nsls2/tes.py b/src/blop/experiments/nsls2/tes.py
deleted file mode 100644
index c0a495a..0000000
--- a/src/blop/experiments/nsls2/tes.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import time as ttime
-
-import bluesky.plan_stubs as bps
-import bluesky.plans as bp  # noqa F401
-import numpy as np
-
-from ... import utils
-
-TARGET_BEAM_WIDTH_X = 0
-TARGET_BEAM_WIDTH_Y = 0
-BEAM_PROP = 0.5
-MIN_SEPARABILITY = 0.1
-OOB_REL_BUFFER = 1 / 32
-
-
-def initialize(shutter, detectors):
-    timeout = 10
-
-    yield from bps.mv(shutter.close_cmd, 1)
-    yield from bps.sleep(2.0)
-
-    start_time = ttime.monotonic()
-    while (ttime.monotonic() - start_time < timeout) and (shutter.status.get(as_string=True) == "Open"):
-        print(f"Shutter not closed, retrying ... (closed_status = {shutter.status.get(as_string=True)})")
-        yield from bps.sleep(1.0)
-        yield from bps.mv(shutter.close_cmd, 1)
-
-    uid = yield from bp.count(detectors)
-
-    yield from bps.mv(shutter.open_cmd, 1)
-    yield from bps.sleep(2.0)
-
-    timeout = 10
-    start_time = ttime.monotonic()
-    while (ttime.monotonic() - start_time < timeout) and (shutter.status.get(as_string=True) != "Open"):
-        print(f"Shutter not open, retrying ... (closed_status = {shutter.status.get(as_string=True)})")
-        yield from bps.sleep(1.0)
-        yield from bps.mv(shutter.open_cmd, 1)
-
-    yield from bps.sleep(10.0)
-
-    return uid
-
-
-def fitness(entry, args):
-    image = getattr(entry, args["image"])
-    # if (args['flux'] not in entry.index) or (args['flux'] is not None):
-    #     flux = 1e0
-    # else:
-    flux = getattr(entry, args["flux"])
-
-    background = args["background"]
-
-    x_min, x_max, y_min, y_max, separability = utils.get_beam_stats(image - background, beam_prop=args["beam_prop"])
-
-    n_y, n_x = image.shape
-
-    # u, s, v = np.linalg.svd(image - background)
-
-    # separability = np.square(s[0]) / np.square(s).sum()
-
-    # ymode, xmode = u[:,0], v[0]
-
-    # x_roots = utils.estimate_root_indices(np.abs(xmode) - args['beam_prop'] * np.abs(xmode).max())
-    # y_roots = utils.estimate_root_indices(np.abs(ymode) - args['beam_prop'] * np.abs(ymode).max())
-
-    # x_min, x_max = x_roots.min(), x_roots.max()
-    # y_min, y_max = y_roots.min(), y_roots.max()
-
-    width_x = x_max - x_min
-    width_y = y_max - y_min
-
-    bad = x_min < (n_x + 1) * args["OOB_rel_buffer"]
-    bad |= x_max > (n_x + 1) * (1 - args["OOB_rel_buffer"])
-    bad |= y_min < (n_y + 1) * args["OOB_rel_buffer"]
-    bad |= y_max > (n_y + 1) * (1 - args["OOB_rel_buffer"])
-    bad |= separability < args["min_separability"]
-    bad |= width_x < 2
-    bad |= width_y < 2
-
-    if bad:
-        fitness = np.nan
-
-    else:
-        fitness = np.log(flux * separability / (width_x**2 + width_y**2))
-
-    return ("fitness", "x_min", "x_max", "y_min", "y_max", "width_x", "width_y", "separability"), (
-        fitness,
-        x_min,
-        x_max,
-        y_min,
-        y_max,
-        width_x,
-        width_y,
-        separability,
-    )
diff --git a/src/blop/experiments/sirepo/__init__.py b/src/blop/experiments/sirepo/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/src/blop/experiments/sirepo/tes.py b/src/blop/experiments/sirepo/tes.py
deleted file mode 100644
index cbf0599..0000000
--- a/src/blop/experiments/sirepo/tes.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import numpy as np
-
-
-def w8_digestion(db, uid):
-    return image_digestion(db, uid, image_name="w8")
-
-
-def w9_digestion(db, uid):
-    return image_digestion(db, uid, image_name="w9")
-
-
-def image_digestion(db, uid, image_name):
-    products = db[uid].table(fill=True)
-
-    for index, entry in products.iterrows():
-        image = getattr(entry, f"{image_name}_image")
-        horizontal_extent = getattr(entry, f"{image_name}_horizontal_extent")
-        vertical_extent = getattr(entry, f"{image_name}_vertical_extent")
-
-        flux = image.sum()
-        n_y, n_x = image.shape
-
-        # reject if there is no flux, or we can't estimate the position and size of the beam for some reason
-        bad = False
-        bad |= not (flux > 0)
-        if not bad:
-            X, Y = np.meshgrid(np.linspace(*horizontal_extent, n_x), np.linspace(*vertical_extent, n_y))
-
-            mean_x = np.sum(X * image) / np.sum(image)
-            mean_y = np.sum(Y * image) / np.sum(image)
-
-            sigma_x = np.sqrt(np.sum((X - mean_x) ** 2 * image) / np.sum(image))
-            sigma_y = np.sqrt(np.sum((Y - mean_y) ** 2 * image) / np.sum(image))
-
-            bad |= any(np.isnan([mean_x, mean_y, sigma_x, sigma_y]))
-
-        if not bad:
-            products.loc[index, ["flux", "x_pos", "y_pos", "x_width", "y_width"]] = (
-                flux,
-                mean_x,
-                mean_y,
-                2 * sigma_x,
-                2 * sigma_y,
-            )
-        else:
-            for col in ["flux", "x_pos", "y_pos", "x_width", "y_width"]:
-                products.loc[index, col] = np.nan
-
-    return products
diff --git a/src/blop/tests/conftest.py b/src/blop/tests/conftest.py
index 6bba490..9de6513 100644
--- a/src/blop/tests/conftest.py
+++ b/src/blop/tests/conftest.py
@@ -9,8 +9,8 @@
 from databroker import Broker

 from blop import DOF, Agent, Objective
+from blop.digestion.tests import chankong_and_haimes_digestion, sketchy_himmelblau_digestion
 from blop.dofs import BrownianMotion
-from blop.utils import functions


 @pytest.fixture(scope="function")
@@ -60,21 +60,21 @@ def RE(db):

 def get_agent(param):
     """
-    Generate a bunch of different agents
+    Generate a bunch of different agents.
     """

     if param == "1d_1f":
         return Agent(
             dofs=DOF(description="The first DOF", name="x1", search_domain=(-5.0, 5.0)),
             objectives=Objective(description="Himmelblau’s function", name="himmelblau", target="min"),
-            digestion=functions.himmelblau_digestion,
+            digestion=sketchy_himmelblau_digestion,
         )

     elif param == "1d_1c":
         return Agent(
             dofs=DOF(description="The first DOF", name="x1", search_domain=(-5.0, 5.0)),
             objectives=Objective(description="Himmelblau’s function", name="himmelblau", target=(95, 105)),
-            digestion=functions.himmelblau_digestion,
+            digestion=sketchy_himmelblau_digestion,
         )

     elif param == "2d_1f":
@@ -84,7 +84,7 @@ def get_agent(param):
                 DOF(description="The first DOF", name="x1", search_domain=(-5.0, 5.0)),
                 DOF(description="The first DOF", name="x2", search_domain=(-5.0, 5.0)),
             ],
             objectives=Objective(description="Himmelblau’s function", name="himmelblau", target="min"),
-            digestion=functions.himmelblau_digestion,
+            digestion=sketchy_himmelblau_digestion,
         )

     elif param == "2d_1f_1c":
@@ -97,7 +97,7 @@ def get_agent(param):
                 Objective(description="Himmelblau’s function", name="himmelblau", target="min"),
                 Objective(description="Himmelblau’s function", name="himmelblau", target=(95, 105)),
             ],
-            digestion=functions.himmelblau_digestion,
+            digestion=sketchy_himmelblau_digestion,
         )

     elif param == "2d_2f_2c":
@@ -112,7 +112,7 @@ def get_agent(param):
                 Objective(description="c1", name="c1", target=(-np.inf, 225)),
                 Objective(description="c2", name="c2", target=(-np.inf, 0)),
             ],
-            digestion=functions.chankong_and_haimes_digestion,
+            digestion=chankong_and_haimes_digestion,
         )

     elif param == "3d_2r_2f_1c":
@@ -129,7 +129,7 @@ def get_agent(param):
                 Objective(name="himmelblau_transpose", target="min"),
                 Objective(description="Himmelblau’s function", name="himmelblau", target=(95, 105)),
             ],
-            digestion=functions.himmelblau_digestion,
+            digestion=sketchy_himmelblau_digestion,
         )

     else:
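Why the tests switch to `sketchy_himmelblau_digestion`: it randomly NaNs out evaluations, which exercises the new NaN handling in `scalarized_fitnesses` above. Without the `torch.where` guard, a single NaN fitness would propagate through the argmax used to pick the best observed point. A tiny sketch of that guard in isolation:

```python
import numpy as np
import torch

# mirror of the scalarized_fitnesses change: demote failed (NaN) points
# to -inf so they can never be selected as the best observed point
f = torch.tensor([1.2, float("nan"), 0.7], dtype=torch.double)
f = torch.where(f.isnan(), -np.inf, f)
print(f.argmax())  # tensor(0); without the guard, argmax lands on the NaN
```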
diff --git a/src/blop/utils/functions.py b/src/blop/utils/functions.py
index e802441..26b16b6 100644
--- a/src/blop/utils/functions.py
+++ b/src/blop/utils/functions.py
@@ -1,5 +1,4 @@
 import numpy as np
-import pandas as pd
 import torch


@@ -61,19 +60,6 @@ def binh_korn(x1, x2):
     return np.where(c, f1, np.nan), np.where(c, f2, np.nan)


-def binh_korn_digestion(df: pd.DataFrame) -> pd.DataFrame:
-    """
-    Digests Himmelblau's function into the feedback.
-    """
-
-    for index, entry in df.iterrows():
-        f1, f2 = binh_korn(entry.x1, entry.x2)
-        df.loc[index, "f1"] = f1
-        df.loc[index, "f2"] = f2
-
-    return df
-
-
 def skewed_himmelblau(x1, x2):
     """
     Himmelblau's function, with skewed coordinates
@@ -182,63 +168,3 @@ def kb_tradeoff_4d(x1, x2, x3, x4):
     flux = np.exp(-0.5 * np.where(d < 5, np.where(d > -5, 0, d + 5), d - 5) ** 2)

     return x_width, y_width, flux
-
-
-def himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
-    """
-    Digests Himmelblau's function into the feedback.
-    """
-    for index, entry in df.iterrows():
-        if not hasattr(entry, "x1"):
-            df.loc[index, "x1"] = x1 = 0
-        else:
-            x1 = entry.x1
-        if not hasattr(entry, "x2"):
-            df.loc[index, "x2"] = x2 = 0
-        else:
-            x2 = entry.x2
-        df.loc[index, "himmelblau"] = himmelblau(x1=x1, x2=x2)
-        df.loc[index, "himmelblau_transpose"] = himmelblau(x1=x2, x2=x1)
-
-    return df
-
-
-def constrained_himmelblau_digestion(df: pd.DataFrame) -> pd.DataFrame:
-    """
-    Digests Himmelblau's function into the feedback.
-    """
-
-    df = himmelblau_digestion(df)
-    df.loc[:, "himmelblau"] = np.where(df.x1.values**2 + df.x1.values**2 < 36, df.himmelblau.values, np.nan)
-
-    return df
-
-
-"""
-Chankong and Haimes function from https://en.wikipedia.org/wiki/Test_functions_for_optimization
-"""
-
-
-def chankong_and_haimes_digestion(df):
-    for index, entry in df.iterrows():
-        df.loc[index, "f1"] = (entry.x1 - 2) ** 2 + (entry.x2 - 1) + 2
-        df.loc[index, "f2"] = 9 * entry.x1 - (entry.x2 - 1) + 2
-        df.loc[index, "c1"] = entry.x1**2 + entry.x2**2
-        df.loc[index, "c2"] = entry.x1 - 3 * entry.x2 + 10
-
-    return df
-
-
-def mock_kbs_digestion(df: pd.DataFrame) -> pd.DataFrame:
-    """
-    Digests a beam waist and height into the feedback.
-    """
-
-    for index, entry in df.iterrows():
-        sigma_x = gaussian_beam_waist(entry.x1, entry.x2)
-        sigma_y = gaussian_beam_waist(entry.x3, entry.x4)
-
-        df.loc[index, "x_width"] = 2 * sigma_x
-        df.loc[index, "y_width"] = 2 * sigma_y
-
-    return df
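After this cleanup the split is: the pure synthetic objectives stay in `blop.utils.functions`, while their DataFrame digestion wrappers live in `blop.digestion.tests`. A hypothetical usage sketch, with values following the standard Binh and Korn definition assumed by the code above:

```python
from blop.utils import functions

print(functions.himmelblau(x1=3.0, x2=2.0))  # 0.0: (3, 2) is a global minimum
f1, f2 = functions.binh_korn(1.0, 2.0)  # a feasible point, so both are finite
print(f1, f2)
```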