Skip to content

[Refine] Remove redundant code #852

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 26 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
14f572e
remove redundant seed fixing code and logger init code, and remove th…
HydrogenSulfate Apr 16, 2024
a55b867
Merge branch 'develop' into clean_code
HydrogenSulfate Apr 25, 2024
1fefb10
update code
HydrogenSulfate Apr 25, 2024
0486493
support default config node
HydrogenSulfate Apr 30, 2024
b8e39c9
remove deprecated class and function in config.py
HydrogenSulfate Apr 30, 2024
64eedf7
update docstring of callbacks.py
HydrogenSulfate Apr 30, 2024
9529ba4
update code
HydrogenSulfate Apr 30, 2024
f2f6b81
update code
HydrogenSulfate May 9, 2024
26b0aa0
restore some config
HydrogenSulfate May 9, 2024
ce524c5
fix config unittest
HydrogenSulfate May 12, 2024
d5fb64a
fix config unittest
HydrogenSulfate May 12, 2024
ba4f939
Merge branch 'develop' into clean_code
HydrogenSulfate May 12, 2024
f114b22
Fix unittest test_writer
HydrogenSulfate May 12, 2024
f603f30
Merge branch 'clean_code' of https://github.com/HydrogenSulfate/Paddl…
HydrogenSulfate May 12, 2024
3ad386f
fix
HydrogenSulfate May 12, 2024
25a875f
fix
HydrogenSulfate May 12, 2024
ac093ae
add default exclude_keys
HydrogenSulfate May 12, 2024
3b091b9
remove unnecessary files
HydrogenSulfate May 13, 2024
3ef8011
Merge branch 'develop' into clean_code
HydrogenSulfate May 15, 2024
849f6ea
Merge branch 'develop' into clean_code
HydrogenSulfate Jun 7, 2024
6e75c4e
remove redundant annotations
HydrogenSulfate Jun 7, 2024
15311a3
remove more code
HydrogenSulfate Jun 7, 2024
79322a3
print log when reach the training max_steps
HydrogenSulfate Jun 7, 2024
f6312fe
add defaults config for all yaml files
HydrogenSulfate Jun 7, 2024
2c73a85
Merge branch 'develop' into clean_code
HydrogenSulfate Jun 15, 2024
67a3155
simplify more code
HydrogenSulfate Jun 15, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions docs/zh/examples/amgnet.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,17 +103,17 @@ unzip data.zip

=== "airfoil"

``` py linenums="61"
``` py linenums="55"
--8<--
examples/amgnet/amgnet_airfoil.py:61:62
examples/amgnet/amgnet_airfoil.py:55:56
--8<--
```

=== "cylinder"

``` py linenums="61"
``` py linenums="55"
--8<--
examples/amgnet/amgnet_cylinder.py:61:62
examples/amgnet/amgnet_cylinder.py:55:56
--8<--
```

Expand Down
24 changes: 7 additions & 17 deletions examples/NLS-MB/NLS-MB_optical_rogue_wave.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,15 +233,10 @@ def train(cfg: DictConfig):
solver = ppsci.solver.Solver(
model,
constraint,
cfg.output_dir,
optimizer,
epochs=cfg.TRAIN.epochs,
iters_per_epoch=cfg.TRAIN.iters_per_epoch,
eval_during_train=cfg.TRAIN.eval_during_train,
eval_freq=cfg.TRAIN.eval_freq,
optimizer=optimizer,
equation=equation,
geom=geom,
validator=validator,
cfg=cfg,
)
# train model
solver.train()
Expand All @@ -259,14 +254,12 @@ def train(cfg: DictConfig):
model,
constraint,
OUTPUT_DIR,
optimizer_lbfgs,
None,
EPOCHS,
cfg.TRAIN.lbfgs.iters_per_epoch,
optimizer=optimizer_lbfgs,
epochs=EPOCHS,
iters_per_epoch=cfg.TRAIN.lbfgs.iters_per_epoch,
eval_during_train=cfg.TRAIN.lbfgs.eval_during_train,
eval_freq=cfg.TRAIN.lbfgs.eval_freq,
equation=equation,
geom=geom,
validator=validator,
)
# train model
Expand Down Expand Up @@ -341,12 +334,9 @@ def evaluate(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
output_dir=cfg.output_dir,
eval_freq=cfg.TRAIN.eval_freq,
equation=equation,
geom=geom,
validator=validator,
pretrained_model_path=cfg.EVAL.pretrained_model_path,
cfg=cfg,
)
solver.eval()

Expand Down Expand Up @@ -374,7 +364,7 @@ def export(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
cfg=cfg,
)
# export model
from paddle.static import InputSpec
Expand Down
22 changes: 6 additions & 16 deletions examples/NLS-MB/NLS-MB_optical_soliton.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,15 +210,10 @@ def train(cfg: DictConfig):
solver = ppsci.solver.Solver(
model,
constraint,
cfg.output_dir,
optimizer,
epochs=cfg.TRAIN.epochs,
iters_per_epoch=cfg.TRAIN.iters_per_epoch,
eval_during_train=cfg.TRAIN.eval_during_train,
eval_freq=cfg.TRAIN.eval_freq,
optimizer=optimizer,
equation=equation,
geom=geom,
validator=validator,
cfg=cfg,
)
# train model
solver.train()
Expand All @@ -237,13 +232,11 @@ def train(cfg: DictConfig):
constraint,
OUTPUT_DIR,
optimizer_lbfgs,
None,
EPOCHS,
cfg.TRAIN.lbfgs.iters_per_epoch,
epochs=EPOCHS,
iters_per_epoch=cfg.TRAIN.lbfgs.iters_per_epoch,
eval_during_train=cfg.TRAIN.lbfgs.eval_during_train,
eval_freq=cfg.TRAIN.lbfgs.eval_freq,
equation=equation,
geom=geom,
validator=validator,
)
# train model
Expand Down Expand Up @@ -318,12 +311,9 @@ def evaluate(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
output_dir=cfg.output_dir,
eval_freq=cfg.TRAIN.eval_freq,
equation=equation,
geom=geom,
validator=validator,
pretrained_model_path=cfg.EVAL.pretrained_model_path,
cfg=cfg,
)
solver.eval()

Expand Down Expand Up @@ -351,7 +341,7 @@ def export(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
cfg=cfg,
)
# export model
from paddle.static import InputSpec
Expand Down
19 changes: 10 additions & 9 deletions examples/NLS-MB/conf/NLS-MB_rogue_wave.yaml
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
defaults:
- ppsci_default
- TRAIN: train_default
- TRAIN/ema: ema_default
- TRAIN/swa: swa_default
- EVAL: eval_default
- INFER: infer_default
- hydra/job/config/override_dirname/exclude_keys: exclude_keys_default
- _self_

hydra:
run:
# dynamic output directory according to running time and override name
dir: output_NLS-MB_rogue_wave/${now:%Y-%m-%d}/${now:%H-%M-%S}/${hydra.job.override_dirname}
job:
name: ${mode} # name of logfile
chdir: false # keep current working directory unchanged
config:
override_dirname:
exclude_keys:
- TRAIN.checkpoint_path
- TRAIN.pretrained_model_path
- EVAL.pretrained_model_path
- mode
- output_dir
- log_freq
callbacks:
init_callback:
_target_: ppsci.utils.callbacks.InitCallback
Expand Down
19 changes: 10 additions & 9 deletions examples/NLS-MB/conf/NLS-MB_soliton.yaml
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
defaults:
- ppsci_default
- TRAIN: train_default
- TRAIN/ema: ema_default
- TRAIN/swa: swa_default
- EVAL: eval_default
- INFER: infer_default
- hydra/job/config/override_dirname/exclude_keys: exclude_keys_default
- _self_

hydra:
run:
# dynamic output directory according to running time and override name
dir: output_NLS-MB_soliton/${now:%Y-%m-%d}/${now:%H-%M-%S}/${hydra.job.override_dirname}
job:
name: ${mode} # name of logfile
chdir: false # keep current working directory unchanged
config:
override_dirname:
exclude_keys:
- TRAIN.checkpoint_path
- TRAIN.pretrained_model_path
- EVAL.pretrained_model_path
- mode
- output_dir
- log_freq
callbacks:
init_callback:
_target_: ppsci.utils.callbacks.InitCallback
Expand Down
29 changes: 3 additions & 26 deletions examples/RegAE/RegAE.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,23 +14,15 @@

from __future__ import annotations

from os import path as osp

import hydra
import paddle
from omegaconf import DictConfig
from paddle.nn import functional as F

import ppsci
from ppsci.utils import logger


def train(cfg: DictConfig):
# set random seed for reproducibility
ppsci.utils.misc.set_random_seed(cfg.seed)
# initialize logger
logger.init_logger("ppsci", osp.join(cfg.output_dir, "train.log"), "info")

# set model
model = ppsci.arch.AutoEncoder(**cfg.MODEL)

Expand Down Expand Up @@ -99,16 +91,9 @@ def loss_expr(output_dict, label_dict, weight_dict=None):
solver = ppsci.solver.Solver(
model,
constraint,
cfg.output_dir,
optimizer,
None,
cfg.TRAIN.epochs,
cfg.TRAIN.iters_per_epoch,
save_freq=cfg.TRAIN.save_freq,
eval_during_train=cfg.TRAIN.eval_during_train,
eval_freq=cfg.TRAIN.eval_freq,
optimizer=optimizer,
validator=validator,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
cfg=cfg,
)
# train model
solver.train()
Expand All @@ -117,11 +102,6 @@ def loss_expr(output_dict, label_dict, weight_dict=None):


def evaluate(cfg: DictConfig):
# set random seed for reproducibility
ppsci.utils.misc.set_random_seed(cfg.seed)
# initialize logger
logger.init_logger("ppsci", osp.join(cfg.output_dir, "eval.log"), "info")

# set model
model = ppsci.arch.AutoEncoder(**cfg.MODEL)

Expand Down Expand Up @@ -151,11 +131,8 @@ def evaluate(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
None,
output_dir=cfg.output_dir,
validator=validator,
pretrained_model_path=cfg.EVAL.pretrained_model_path,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
cfg=cfg,
)
# evaluate after finished training
solver.eval()
Expand Down
22 changes: 13 additions & 9 deletions examples/RegAE/conf/RegAE.yaml
Original file line number Diff line number Diff line change
@@ -1,19 +1,23 @@
defaults:
- ppsci_default
- TRAIN: train_default
- TRAIN/ema: ema_default
- TRAIN/swa: swa_default
- EVAL: eval_default
- INFER: infer_default
- hydra/job/config/override_dirname/exclude_keys: exclude_keys_default
- _self_

hydra:
run:
# dynamic output directory according to running time and override name
dir: output_RegAE/${now:%Y-%m-%d}/${now:%H-%M-%S}/${hydra.job.override_dirname}
job:
name: ${mode} # name of logfile
chdir: false # keep current working directory unchanged
config:
override_dirname:
exclude_keys:
- TRAIN.checkpoint_path
- TRAIN.pretrained_model_path
- EVAL.pretrained_model_path
- mode
- output_dir
- log_freq
callbacks:
init_callback:
_target_: ppsci.utils.callbacks.InitCallback
sweep:
# output directory for multirun
dir: ${hydra.run.dir}
Expand Down
22 changes: 3 additions & 19 deletions examples/allen_cahn/allen_cahn_causal.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,22 +158,9 @@ def gen_label_batch(input_batch):
solver = ppsci.solver.Solver(
model,
constraint,
cfg.output_dir,
optimizer,
lr_scheduler,
cfg.TRAIN.epochs,
cfg.TRAIN.iters_per_epoch,
save_freq=cfg.TRAIN.save_freq,
log_freq=cfg.log_freq,
eval_during_train=True,
eval_freq=cfg.TRAIN.eval_freq,
seed=cfg.seed,
optimizer=optimizer,
equation=equation,
validator=validator,
pretrained_model_path=cfg.TRAIN.pretrained_model_path,
checkpoint_path=cfg.TRAIN.checkpoint_path,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
use_tbd=True,
cfg=cfg,
)
# train model
Expand Down Expand Up @@ -222,11 +209,8 @@ def evaluate(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
output_dir=cfg.output_dir,
log_freq=cfg.log_freq,
validator=validator,
pretrained_model_path=cfg.EVAL.pretrained_model_path,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
cfg=cfg,
)

# evaluate after finished training
Expand All @@ -248,7 +232,7 @@ def export(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
cfg=cfg,
)
# export model
from paddle.static import InputSpec
Expand Down
22 changes: 3 additions & 19 deletions examples/allen_cahn/allen_cahn_plain.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,22 +156,9 @@ def gen_label_batch(input_batch):
solver = ppsci.solver.Solver(
model,
constraint,
cfg.output_dir,
optimizer,
lr_scheduler,
cfg.TRAIN.epochs,
cfg.TRAIN.iters_per_epoch,
save_freq=cfg.TRAIN.save_freq,
log_freq=cfg.log_freq,
eval_during_train=True,
eval_freq=cfg.TRAIN.eval_freq,
seed=cfg.seed,
optimizer=optimizer,
equation=equation,
validator=validator,
pretrained_model_path=cfg.TRAIN.pretrained_model_path,
checkpoint_path=cfg.TRAIN.checkpoint_path,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
use_tbd=True,
cfg=cfg,
)
# train model
Expand Down Expand Up @@ -220,11 +207,8 @@ def evaluate(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
output_dir=cfg.output_dir,
log_freq=cfg.log_freq,
validator=validator,
pretrained_model_path=cfg.EVAL.pretrained_model_path,
eval_with_no_grad=cfg.EVAL.eval_with_no_grad,
cfg=cfg,
)

# evaluate after finished training
Expand All @@ -246,7 +230,7 @@ def export(cfg: DictConfig):
# initialize solver
solver = ppsci.solver.Solver(
model,
pretrained_model_path=cfg.INFER.pretrained_model_path,
cfg=cfg,
)
# export model
from paddle.static import InputSpec
Expand Down
Loading