Skip to content

Commit 52a60c3

Browse files
authored
Add GradNN regularization weight to parameters
1 parent 8d5bb84 commit 52a60c3

1 file changed

Lines changed: 1 addition & 0 deletions

File tree

rom_application/RomManager_cantilever_NN/demo_rom_manager_nn.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,7 @@ def GetRomManagerParameters():
123123
"layers_size":[200,200], // Size of each hidden layer in the Neural Network (for more layers, append more values)
124124
"batch_size":4,
125125
"epochs":10,
126+
"NN_gradient_regularisation_weight": 1.0,
126127
"lr_strategy": { // Learning Rate update strategy
127128
"scheduler": "sgdr", // "const", "steps", "sgdr"
128129
"base_lr": 0.001, // Initial LR

0 commit comments

Comments (0)