From 2a31829e473ddbc8fd7c00e39593c72ff481f0ed Mon Sep 17 00:00:00 2001
From: pescap
Date: Wed, 6 Dec 2023 09:36:41 -0300
Subject: [PATCH 1/4] initial commit towards softadapt

---
 deepxde/callbacks.py | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/deepxde/callbacks.py b/deepxde/callbacks.py
index 3039390df..ed1991b47 100644
--- a/deepxde/callbacks.py
+++ b/deepxde/callbacks.py
@@ -571,3 +571,25 @@ def on_epoch_end(self):
             raise ValueError(
                 "`num_bcs` changed! Please update the loss function by `model.compile`."
             )
+
+
+class SoftAdapt(Callback):
+    """Use adaptive loss balancing. 
+
+    Args:
+        warmup: number of steps without applying any weighting.
+        epsilon: parameter to prevent overflows.
+    
+    """
+
+    def __init__(self, warmup=0, epsilon=1e-8):
+        super().__init__()
+
+        self.baseline = warmup
+        self.loss = None
+
+    def on_train_begin(self):
+        # Allow instances to be re-used.
+        # Evaluate coefficients.
+        # Update weights.
+    

From da7685b27c9f4224b37db5302301d9d3697b4c92 Mon Sep 17 00:00:00 2001
From: pescap
Date: Wed, 6 Dec 2023 09:37:34 -0300
Subject: [PATCH 2/4] initial commit towards softadapt

---
 deepxde/callbacks.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/deepxde/callbacks.py b/deepxde/callbacks.py
index ed1991b47..61472a40b 100644
--- a/deepxde/callbacks.py
+++ b/deepxde/callbacks.py
@@ -574,12 +574,12 @@ def on_epoch_end(self):
 
 
 class SoftAdapt(Callback):
-    """Use adaptive loss balancing. 
+    """Use adaptive loss balancing.
 
     Args:
         warmup: number of steps without applying any weighting.
         epsilon: parameter to prevent overflows.
-    
+
     """
 
     def __init__(self, warmup=0, epsilon=1e-8):
@@ -592,4 +592,4 @@ def on_train_begin(self):
         # Allow instances to be re-used.
         # Evaluate coefficients.
         # Update weights.
-    
+    print("work in progress")

From ca92dc7c6ab34827a315113fc2393802b6ed5d4a Mon Sep 17 00:00:00 2001
From: pescap
Date: Wed, 6 Dec 2023 10:22:26 -0300
Subject: [PATCH 3/4] working on setting variable loss_weights

---
 deepxde/callbacks.py | 21 +++++++++++++++------
 1 file changed, 15 insertions(+), 6 deletions(-)

diff --git a/deepxde/callbacks.py b/deepxde/callbacks.py
index 61472a40b..f7188540b 100644
--- a/deepxde/callbacks.py
+++ b/deepxde/callbacks.py
@@ -577,19 +577,28 @@ class SoftAdapt(Callback):
     """Use adaptive loss balancing.
 
     Args:
-        warmup: number of steps without applying any weighting.
+        beta: If beta > 0, then SoftAdapt will pay more attention to the worst
+            performing loss component. If beta < 0, then SoftAdapt will assign
+            higher weights to the better performing components. beta = 0 is the
+            trivial case and all loss components will have coefficient 1.
         epsilon: parameter to prevent overflows.
 
     """
 
-    def __init__(self, warmup=0, epsilon=1e-8):
+    def __init__(self, beta=.1, epsilon=1e-8):
         super().__init__()
 
-        self.baseline = warmup
-        self.loss = None
+        self.beta = beta
+        self.epsilon = epsilon
 
     def on_train_begin(self):
+        loss_weights = tf.constant(self.model.loss_weights)
+        loss_weights = dde.Variable(loss_weights, trainable=False, dtype=loss_weights.dtype)
+        loss_weights *= 0
+
+        self.model.loss_weights = loss_weights
+
+        print(loss_weights, 'loss_weights')
         # Allow instances to be re-used.
         # Evaluate coefficients.
-        # Update weights.
-    print("work in progress")
+        # Update weights. 
\ No newline at end of file

From b1ea5a2be6ed1e6cbf8b68f6516ff657efc0bf9a Mon Sep 17 00:00:00 2001
From: pescap
Date: Wed, 6 Dec 2023 11:10:53 -0300
Subject: [PATCH 4/4] minor changes

---
 deepxde/callbacks.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/deepxde/callbacks.py b/deepxde/callbacks.py
index f7188540b..ce6957bc8 100644
--- a/deepxde/callbacks.py
+++ b/deepxde/callbacks.py
@@ -6,7 +6,7 @@
 from . import config
 from . import gradients as grad
 from . import utils
-from .backend import backend_name, tf, torch, paddle
+from .backend import backend_name, tf, torch, paddle, Variable
 
 
 class Callback:
@@ -585,7 +585,7 @@ class SoftAdapt(Callback):
 
     """
 
-    def __init__(self, beta=.1, epsilon=1e-8):
+    def __init__(self, beta=0.1, epsilon=1e-8):
         super().__init__()
 
         self.beta = beta
@@ -593,12 +593,12 @@ def __init__(self, beta=.1, epsilon=1e-8):
 
     def on_train_begin(self):
         loss_weights = tf.constant(self.model.loss_weights)
-        loss_weights = dde.Variable(loss_weights, trainable=False, dtype=loss_weights.dtype)
+        loss_weights = Variable(loss_weights, dtype=loss_weights.dtype)
         loss_weights *= 0
-
+
         self.model.loss_weights = loss_weights
-
-        print(loss_weights, 'loss_weights')
+
+        print(loss_weights, "loss_weights")
         # Allow instances to be re-used.
         # Evaluate coefficients.
-        # Update weights. 
+        # Update weights.
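
The series above wires up the plumbing (model.loss_weights turned into a backend
Variable in on_train_begin) but leaves the actual weighting rule as TODO comments
("Evaluate coefficients", "Update weights"). For reference, below is a minimal,
framework-agnostic sketch of one common form of the SoftAdapt rule (Heydari et
al., 2019) that such a callback could apply at the end of each epoch. The helper
name, the normalization of the per-component rate of change, and the rescaling to
an all-ones baseline are illustrative assumptions, not part of these patches.

import numpy as np


def softadapt_weights(prev_losses, curr_losses, beta=0.1, epsilon=1e-8):
    """Return one weight per loss component from the recent change of each loss.

    beta > 0 emphasizes the worst performing (slowest improving) components,
    beta < 0 the best performing ones, and beta = 0 gives uniform weights.
    """
    prev = np.asarray(prev_losses, dtype=float)
    curr = np.asarray(curr_losses, dtype=float)
    # Rate of change of each component; epsilon guards against division by zero.
    rate = (curr - prev) / (prev + epsilon)
    # Softmax over the scaled rates, shifted by the max for numerical stability.
    scaled = beta * (rate - rate.max())
    weights = np.exp(scaled) / (np.exp(scaled).sum() + epsilon)
    # Rescale so that beta = 0 reproduces the default all-ones loss_weights
    # (a convention chosen here for illustration, not prescribed by the paper).
    return weights * weights.size


# Example: the PDE residual loss stagnates while the BC loss drops quickly,
# so with beta > 0 the residual term receives almost all of the weight.
print(softadapt_weights([1.0, 1.0], [0.9, 0.2], beta=10.0))

Inside the callback, the previous and current per-component losses would come
from the recorded training history, and the resulting array would be written
back into the loss_weights variable created in on_train_begin before the next
epoch.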