fixed docs and added more prereqs
thomaswmorris committed Oct 30, 2023
1 parent fca00f0 commit b140195
Showing 5 changed files with 58 additions and 41 deletions.
24 changes: 14 additions & 10 deletions bloptools/bayesian/agent.py
@@ -136,7 +136,7 @@ def tell(self, new_table=None, append=True, train=True, **kwargs):

        likelihood = gpytorch.likelihoods.GaussianLikelihood(
            noise_constraint=gpytorch.constraints.Interval(
-               torch.tensor(1e-4).square(),
+               torch.tensor(1e-2).square(),
                torch.tensor(1 / obj.min_snr).square(),
            ),
            # noise_prior=gpytorch.priors.torch_priors.LogNormalPrior(loc=loc, scale=scale),
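
This change raises the lower bound of the likelihood's noise interval from (1e-4)² to (1e-2)², so the fitted observation noise can no longer collapse toward zero during hyperparameter training. A minimal standalone sketch of the resulting constraint, with an illustrative value standing in for obj.min_snr:

    import gpytorch
    import torch

    min_snr = 10  # illustrative stand-in for obj.min_snr

    # Noise variance is confined to [1e-4, 1e-2] for min_snr = 10.
    likelihood = gpytorch.likelihoods.GaussianLikelihood(
        noise_constraint=gpytorch.constraints.Interval(
            torch.tensor(1e-2).square(),         # lower bound: (1e-2)^2 = 1e-4
            torch.tensor(1 / min_snr).square(),  # upper bound: (1/10)^2 = 1e-2
        ),
    )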
@@ -293,6 +293,9 @@ def acquire(self, acquisition_inputs):
            # for obj in self.objectives:
            #     products.loc[index, objective["key"]] = getattr(entry, objective["key"])

+       except KeyboardInterrupt:
+           raise KeyboardInterrupt()
+
        except Exception as error:
            if not self.allow_acquisition_errors:
                raise error
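
The explicit KeyboardInterrupt clause documents the intent that user interrupts always propagate, even when acquisition errors are tolerated. Strictly speaking, the generic handler below it could never swallow one anyway, since KeyboardInterrupt derives from BaseException rather than Exception:

    # KeyboardInterrupt sits outside the Exception hierarchy, so an
    # `except Exception` clause does not catch it.
    print(issubclass(KeyboardInterrupt, Exception))      # False
    print(issubclass(KeyboardInterrupt, BaseException))  # True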
@@ -334,11 +337,12 @@ def learn(

        if acq_func is not None:
            for i in range(iterations):
-               x, acq_func_meta = self.ask(n=n, acq_func_identifier=acq_func, **kwargs)
-
-               new_table = yield from self.acquire(x)
-               new_table.loc[:, "acq_func"] = acq_func_meta["name"]
-               self.tell(new_table=new_table, train=train)
+               print(f"running iteration {i + 1} / {iterations}")
+               for single_acq_func in np.atleast_1d(acq_func):
+                   x, acq_func_meta = self.ask(n=n, acq_func_identifier=single_acq_func, **kwargs)
+                   new_table = yield from self.acquire(x)
+                   new_table.loc[:, "acq_func"] = acq_func_meta["name"]
+                   self.tell(new_table=new_table, train=train)

self.initialized = True
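
With np.atleast_1d, learn() now accepts either a single acquisition-function identifier or a sequence of them, running each one within every iteration. A hypothetical usage sketch (identifiers other than "qei", which appears in the diff below, are illustrative):

    RE(agent.learn("qei", n=4, iterations=8))            # one acquisition function, as before
    RE(agent.learn(["qei", "qucb"], n=4, iterations=8))  # cycle through several per iteration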

@@ -357,12 +361,12 @@ def reset(self):
    def benchmark(
        self, output_dir="./", runs=16, n_init=64, learning_kwargs_list=[{"acq_func": "qei", "n": 4, "iterations": 16}]
    ):
-       cache_limits = {dof.name: dof.limits for dof in self.dofs}
+       # cache_limits = {dof.name: dof.limits for dof in self.dofs}

        for run in range(runs):
-           for dof in self.dofs:
-               offset = 0.25 * np.ptp(dof.limits) * np.random.uniform(low=-1, high=1)
-               dof.limits = (cache_limits[dof.name] + offset, cache_limits[dof.name] + offset)
+           # for dof in self.dofs:
+           #     offset = 0.25 * np.ptp(dof.limits) * np.random.uniform(low=-1, high=1)
+           #     dof.limits = (cache_limits[dof.name][0] + offset, cache_limits[dof.name][1] + offset)

self.reset()
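
The limit-jittering in benchmark() is disabled rather than deleted; note that the commented replacement also fixes a bug in the old line, which added a float offset to the cached limits tuple rather than to each endpoint. For reference, a standalone sketch of the intended jitter, assuming limits are (low, high) tuples:

    import numpy as np

    cache_limits = {"x1": (-5.0, 5.0)}  # illustrative cached DOF limits

    # Shift the whole window by up to a quarter of its width in either direction.
    offset = 0.25 * np.ptp(cache_limits["x1"]) * np.random.uniform(low=-1, high=1)
    low, high = cache_limits["x1"]
    jittered_limits = (low + offset, high + offset)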

2 changes: 2 additions & 0 deletions bloptools/bayesian/devices.py
@@ -51,6 +51,8 @@ def __init__(
        self.active = active
        self.latent_group = latent_group if latent_group is not None else str(uuid.uuid4())

+       self.device.kind = "hinted"
+
    @property
    def lower_limit(self):
        return float(self.limits[0])
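
Setting kind = "hinted" on the wrapped ophyd device marks its readings as worth displaying, so bluesky's best-effort callbacks include them in tables and plots. A minimal sketch with a bare ophyd Signal (names are illustrative):

    from ophyd import Signal

    sig = Signal(name="x1", value=0.0)
    sig.kind = "hinted"  # the string is coerced to ophyd.Kind.hinted
    print(sig.hints)     # expected: {'fields': ['x1']}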
69 changes: 38 additions & 31 deletions docs/source/tutorials/passive-dofs.ipynb
@@ -38,43 +38,42 @@
"metadata": {},
"outputs": [],
"source": [
"from bloptools.utils import prepare_re_env\n",
"\n",
"%run -i $prepare_re_env.__file__ --db-type=temp\n",
"import pytest\n",
"\n",
"from bloptools.utils import functions\n",
"from bloptools.bayesian import DOF, Agent, BrownianMotion, Objective\n",
"\n",
"\n",
"@pytest.mark.test_func\n",
"def test_passive_dofs(RE, db):\n",
" dofs = [\n",
" DOF(name=\"x1\", limits=(-5.0, 5.0)),\n",
" DOF(name=\"x2\", limits=(-5.0, 5.0)),\n",
" DOF(name=\"x3\", limits=(-5.0, 5.0), active=False),\n",
" DOF(BrownianMotion(name=\"brownian1\"), read_only=True),\n",
" DOF(BrownianMotion(name=\"brownian2\"), read_only=True, active=False),\n",
" ]\n",
"dofs = [\n",
" DOF(name=\"x1\", limits=(-5.0, 5.0)),\n",
" DOF(name=\"x2\", limits=(-5.0, 5.0)),\n",
" DOF(name=\"x3\", limits=(-5.0, 5.0), active=False),\n",
" DOF(BrownianMotion(name=\"brownian1\"), read_only=True),\n",
" DOF(BrownianMotion(name=\"brownian2\"), read_only=True, active=False),\n",
"]\n",
"\n",
" objectives = [\n",
" Objective(key=\"himmelblau\", minimize=True),\n",
" ]\n",
"objectives = [\n",
" Objective(key=\"himmelblau\", minimize=True),\n",
"]\n",
"\n",
" agent = Agent(\n",
" dofs=dofs,\n",
" objectives=objectives,\n",
" digestion=functions.constrained_himmelblau_digestion,\n",
" db=db,\n",
" verbose=True,\n",
" tolerate_acquisition_errors=False,\n",
" )\n",
"agent = Agent(\n",
" dofs=dofs,\n",
" objectives=objectives,\n",
" digestion=functions.constrained_himmelblau_digestion,\n",
" db=db,\n",
" verbose=True,\n",
" tolerate_acquisition_errors=False,\n",
")\n",
"\n",
" RE(agent.learn(\"qr\", n=32))\n",
"\n",
" agent.plot_objectives()\n",
" agent.plot_acquisition()\n",
" agent.plot_validity()"
"RE(agent.learn(\"qr\", n=16))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "49127601",
"metadata": {},
"outputs": [],
"source": [
"agent.dofs"
]
},
{
@@ -86,6 +85,14 @@
"source": [
"agent.plot_objectives()"
]
+ },
+ {
+  "cell_type": "code",
+  "execution_count": null,
+  "id": "4d856a7d",
+  "metadata": {},
+  "outputs": [],
+  "source": []
}
],
"metadata": {
@@ -104,7 +111,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
"version": "3.10.12"
},
"vscode": {
"interpreter": {
3 changes: 3 additions & 0 deletions requirements-dev.txt
@@ -2,11 +2,14 @@
# the documentation) but not necessarily required for _using_ it.
black
pytest-codecov
+chardet
coverage
flake8
furo
isort
markupsafe
+nbstripout
numpydoc
pre-commit
pre-commit-hooks
pytest
1 change: 1 addition & 0 deletions requirements.txt
@@ -3,6 +3,7 @@ botorch
databroker
gpytorch
h5py
+IPython
matplotlib
numpy
ophyd
