From 6cf2951e4499404b80fd999d6a2e2b5f8b6e7f30 Mon Sep 17 00:00:00 2001
From: megemini
Date: Wed, 17 Jan 2024 19:45:35 +0800
Subject: [PATCH] =?UTF-8?q?=E3=80=90PPSCI=20Doc=20No.20=E3=80=91ppsci.arch?=
 =?UTF-8?q?.DeepPhyLSTM=09=20(#756)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* [Add] phylstm examples

* [Change] phylstm examples

* [Change] remove blank line
---
 ppsci/arch/phylstm.py | 40 +++++++++++++++++++++++++++++++++++++++-
 1 file changed, 39 insertions(+), 1 deletion(-)

diff --git a/ppsci/arch/phylstm.py b/ppsci/arch/phylstm.py
index a04ed43ac..b2d935465 100644
--- a/ppsci/arch/phylstm.py
+++ b/ppsci/arch/phylstm.py
@@ -28,8 +28,46 @@ class DeepPhyLSTM(base.Arch):
         model_type (int, optional): The model type, value is 2 or 3, 2 indicates having two sub-models, 3 indicates having three submodels. Defaults to 2.
 
     Examples:
+        >>> import paddle
         >>> import ppsci
-        >>> model = ppsci.arch.DeepPhyLSTM(1, 1, 100)
+        >>> # model_type is `2`
+        >>> model = ppsci.arch.DeepPhyLSTM(
+        ...     input_size=16,
+        ...     output_size=1,
+        ...     hidden_size=100,
+        ...     model_type=2)
+        >>> out = model(
+        ...     {"ag":paddle.rand([64, 16, 16]),
+        ...      "ag_c":paddle.rand([64, 16, 16]),
+        ...      "phi":paddle.rand([1, 16, 16])})
+        >>> for k, v in out.items():
+        ...     print(f"{k} {v.dtype} {v.shape}")
+        eta_pred paddle.float32 [64, 16, 1]
+        eta_dot_pred paddle.float32 [64, 16, 1]
+        g_pred paddle.float32 [64, 16, 1]
+        eta_t_pred_c paddle.float32 [64, 16, 1]
+        eta_dot_pred_c paddle.float32 [64, 16, 1]
+        lift_pred_c paddle.float32 [64, 16, 1]
+        >>> # model_type is `3`
+        >>> model = ppsci.arch.DeepPhyLSTM(
+        ...     input_size=16,
+        ...     output_size=1,
+        ...     hidden_size=100,
+        ...     model_type=3)
+        >>> out = model(
+        ...     {"ag":paddle.rand([64, 16, 1]),
+        ...      "ag_c":paddle.rand([64, 16, 1]),
+        ...      "phi":paddle.rand([1, 16, 16])})
+        >>> for k, v in out.items():
+        ...     print(f"{k} {v.dtype} {v.shape}")
+        eta_pred paddle.float32 [64, 16, 1]
+        eta_dot_pred paddle.float32 [64, 16, 1]
+        g_pred paddle.float32 [64, 16, 1]
+        eta_t_pred_c paddle.float32 [64, 16, 1]
+        eta_dot_pred_c paddle.float32 [64, 16, 1]
+        lift_pred_c paddle.float32 [64, 16, 1]
+        g_t_pred_c paddle.float32 [64, 16, 1]
+        g_dot_pred_c paddle.float32 [64, 16, 1]
     """
 
     def __init__(self, input_size, output_size, hidden_size=100, model_type=2):
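
As a quick way to exercise the docstring examples added above, here is a minimal sketch (not part of this patch) that runs them with Python's standard doctest runner. It assumes PaddlePaddle and PaddleScience are installed so that the module touched by this diff, ppsci.arch.phylstm, is importable; doctest.testmod is the standard-library entry point.

    import doctest

    import ppsci.arch.phylstm as phylstm

    # Collect the `>>>` examples from the module's docstrings, execute them,
    # and compare the printed output against the expected lines in the diff.
    results = doctest.testmod(phylstm, verbose=True)
    print(f"attempted={results.attempted}, failed={results.failed}")

Running this after applying the patch should report zero failures as long as the printed dtypes and shapes match the expected output lines in the docstring.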