Commit

adding quickstart notebook
csteinmetz1 committed Nov 12, 2023
1 parent d34a0cf commit a7240e6
Showing 2 changed files with 101 additions and 3 deletions.
9 changes: 6 additions & 3 deletions README.md
@@ -42,6 +42,8 @@ Using an effect in your computation graph is as simple as calling the function w

Here is a minimal example to demonstrate reverse engineering the drive value of a simple distortion effect using gradient descent.

Try it for yourself: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/csteinmetz1/dasp-pytorch/blob/main/examples/quickstart.ipynb)

```python
import torch
import torchaudio
@@ -56,7 +58,7 @@ x = x.unsqueeze(0)

# apply some distortion with 16 dB drive
drive = torch.tensor([16.0])
-y = dasp_pytorch.functional.distortion(x, drive)
+y = dasp_pytorch.functional.distortion(x, sr, drive)

# create a parameter to optimize
drive_hat = torch.nn.Parameter(torch.tensor(0.0))
@@ -66,7 +68,7 @@ optimizer = torch.optim.Adam([drive_hat], lr=0.01)
n_iters = 2500
for n in range(n_iters):
    # apply distortion with the estimated parameter
-    y_hat = dasp_pytorch.functional.distortion(x, drive_hat)
+    y_hat = dasp_pytorch.functional.distortion(x, sr, drive_hat)

    # compute distance between estimate and target
    loss = torch.nn.functional.mse_loss(y_hat, y)
@@ -76,8 +78,9 @@ for n in range(n_iters):
    loss.backward()
    optimizer.step()
    print(
-        f"step: {n+1}/{n_iters}, loss: {loss.item():.3f}, drive: {drive_hat.item():.3f}"
+        f"step: {n+1}/{n_iters}, loss: {loss.item():.3e}, drive: {drive_hat.item():.3f}\r"
    )

```
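
A natural follow-up, once the loop has converged, is to listen to the match. The snippet below is a minimal sketch, reusing the `distortion(x, sr, drive_hat)` call from the loop above; the output filename is illustrative.

```python
# render the riff with the recovered drive and write it to disk
with torch.no_grad():
    y_match = dasp_pytorch.functional.distortion(x, sr, drive_hat)

# torchaudio.save expects (n_channels, n_samples), so drop the batch dim
torchaudio.save("short_riff_matched.wav", y_match.squeeze(0), sr)
```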

For the remaining examples we will use the [GuitarSet](https://guitarset.weebly.com/) dataset.
95 changes: 95 additions & 0 deletions examples/quickstart.ipynb
@@ -0,0 +1,95 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "Q32l5f06ohtz"
},
"outputs": [],
"source": [
"!pip install dasp-pytorch"
]
},
{
"cell_type": "code",
"source": [
"!wget https://csteinmetz1.github.io/sounds/assets/short_riff.wav"
],
"metadata": {
"id": "MsHHumIypsAy"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"import torch\n",
"import torchaudio\n",
"import dasp_pytorch"
],
"metadata": {
"id": "VGSU9x2VojWg"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# Load audio\n",
"x, sr = torchaudio.load(\"short_riff.wav.1\")\n",
"\n",
"# create batch dim\n",
"# (batch_size, n_channels, n_samples)\n",
"x = x.unsqueeze(0)\n",
"\n",
"# apply some distortion with 16 dB drive\n",
"drive = torch.tensor([16.0])\n",
"y = dasp_pytorch.functional.distortion(x, sr, drive)\n",
"\n",
"# create a parameter to optimizer\n",
"drive_hat = torch.nn.Parameter(torch.tensor(0.0))\n",
"optimizer = torch.optim.Adam([drive_hat], lr=0.01)\n",
"\n",
"# optimize the parameter\n",
"n_iters = 2500\n",
"for n in range(n_iters):\n",
" # apply distortion with the estimated parameter\n",
" y_hat = dasp_pytorch.functional.distortion(x, sr, drive_hat)\n",
"\n",
" # compute distance between estimate and target\n",
" loss = torch.nn.functional.mse_loss(y_hat, y)\n",
"\n",
" # optimize\n",
" optimizer.zero_grad()\n",
" loss.backward()\n",
" optimizer.step()\n",
" print(\n",
" f\"step: {n+1}/{n_iters}, loss: {loss.item():.3e}, drive: {drive_hat.item():.3f}\\r\"\n",
" )\n",
""
],
"metadata": {
"id": "MOhyTl3mopSi"
},
"execution_count": null,
"outputs": []
}
]
}
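
When running the notebook, it can be useful to confirm that the optimization actually converges. A minimal sketch of one way to do this, assuming `matplotlib` is available (it is preinstalled on Colab): record the loss at each step and plot it after the loop.

```python
import matplotlib.pyplot as plt

losses = []
for n in range(n_iters):
    # same update step as in the final notebook cell
    y_hat = dasp_pytorch.functional.distortion(x, sr, drive_hat)
    loss = torch.nn.functional.mse_loss(y_hat, y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    losses.append(loss.item())

plt.plot(losses)
plt.xlabel("step")
plt.ylabel("MSE loss")
plt.yscale("log")
plt.show()
```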
