diff --git a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c05_forecasting_with_machine_learning.ipynb b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c05_forecasting_with_machine_learning.ipynb
index 388329c85b5..289d5f2d83a 100644
--- a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c05_forecasting_with_machine_learning.ipynb
+++ b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c05_forecasting_with_machine_learning.ipynb
@@ -216,7 +216,7 @@
  "model = keras.models.Sequential([\n",
  " keras.layers.Dense(1, input_shape=[window_size])\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1e-5, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-5, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -244,7 +244,7 @@
  "\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-6 * 10**(epoch / 30))\n",
- "optimizer = keras.optimizers.SGD(lr=1e-6, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-6, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -282,7 +282,7 @@
  "model = keras.models.Sequential([\n",
  " keras.layers.Dense(1, input_shape=[window_size])\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1e-5, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-5, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -387,7 +387,7 @@
  "\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-7 * 10**(epoch / 20))\n",
- "optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-7, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -428,7 +428,7 @@
  " keras.layers.Dense(1)\n",
  "])\n",
  "\n",
- "optimizer = keras.optimizers.SGD(lr=1e-5, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-5, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
diff --git a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c06_forecasting_with_rnn.ipynb b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c06_forecasting_with_rnn.ipynb
index 962046f7237..f0eb58a4953 100644
--- a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c06_forecasting_with_rnn.ipynb
+++ b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c06_forecasting_with_rnn.ipynb
@@ -211,7 +211,7 @@
  "])\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-7 * 10**(epoch / 20))\n",
- "optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-7, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -254,7 +254,7 @@
  " keras.layers.Dense(1),\n",
  " keras.layers.Lambda(lambda x: x * 200.0)\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1.5e-6, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1.5e-6, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -382,7 +382,7 @@
  "])\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-7 * 10**(epoch / 30))\n",
- "optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-7, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -426,7 +426,7 @@
  " keras.layers.Dense(1),\n",
  " keras.layers.Lambda(lambda x: x * 200.0)\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1e-6, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-6, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
diff --git a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c07_forecasting_with_stateful_rnn.ipynb b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c07_forecasting_with_stateful_rnn.ipynb
index b12d836eae8..a77d0b39a2e 100644
--- a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c07_forecasting_with_stateful_rnn.ipynb
+++ b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c07_forecasting_with_stateful_rnn.ipynb
@@ -234,7 +234,7 @@
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-8 * 10**(epoch / 30))\n",
  "reset_states = ResetStatesCallback()\n",
- "optimizer = keras.optimizers.SGD(lr=1e-8, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-8, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -277,7 +277,7 @@
  " keras.layers.Dense(1),\n",
  " keras.layers.Lambda(lambda x: x * 200.0)\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1e-7, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-7, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
diff --git a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c08_forecasting_with_lstm.ipynb b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c08_forecasting_with_lstm.ipynb
index e8dfba8208a..24fdbbbf6c9 100644
--- a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c08_forecasting_with_lstm.ipynb
+++ b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c08_forecasting_with_lstm.ipynb
@@ -214,7 +214,7 @@
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-8 * 10**(epoch / 20))\n",
  "reset_states = ResetStatesCallback()\n",
- "optimizer = keras.optimizers.SGD(lr=1e-8, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-8, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -257,7 +257,7 @@
  " keras.layers.Dense(1),\n",
  " keras.layers.Lambda(lambda x: x * 200.0)\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=5e-7, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=5e-7, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
diff --git a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c09_forecasting_with_cnn.ipynb b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c09_forecasting_with_cnn.ipynb
index fc72b7a9ef0..86cb7f64601 100644
--- a/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c09_forecasting_with_cnn.ipynb
+++ b/courses/udacity_intro_to_tensorflow_for_deep_learning/l08c09_forecasting_with_cnn.ipynb
@@ -216,7 +216,7 @@
  "])\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-8 * 10**(epoch / 20))\n",
- "optimizer = keras.optimizers.SGD(lr=1e-8, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-8, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -263,7 +263,7 @@
  " keras.layers.Dense(1),\n",
  " keras.layers.Lambda(lambda x: x * 200)\n",
  "])\n",
- "optimizer = keras.optimizers.SGD(lr=1e-5, momentum=0.9)\n",
+ "optimizer = keras.optimizers.SGD(learning_rate=1e-5, momentum=0.9)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -362,7 +362,7 @@
  "model.add(keras.layers.Conv1D(filters=1, kernel_size=1))\n",
  "lr_schedule = keras.callbacks.LearningRateScheduler(\n",
  " lambda epoch: 1e-4 * 10**(epoch / 30))\n",
- "optimizer = keras.optimizers.Adam(lr=1e-4)\n",
+ "optimizer = keras.optimizers.Adam(learning_rate=1e-4)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",
@@ -411,7 +411,7 @@
  " activation=\"relu\")\n",
  " )\n",
  "model.add(keras.layers.Conv1D(filters=1, kernel_size=1))\n",
- "optimizer = keras.optimizers.Adam(lr=3e-4)\n",
+ "optimizer = keras.optimizers.Adam(learning_rate=3e-4)\n",
  "model.compile(loss=keras.losses.Huber(),\n",
  " optimizer=optimizer,\n",
  " metrics=[\"mae\"])\n",