From 6b9a23e14e200eb6ca326b6215fc1a2134a28a10 Mon Sep 17 00:00:00 2001
From: lvhaoyu
Date: Thu, 23 Mar 2023 17:56:27 +0800
Subject: [PATCH] replace asserts with explicit ValueError checks

---
 ms_adapter/pytorch/nn/functional.py   | 5 ++++-
 ms_adapter/pytorch/nn/modules/loss.py | 4 +++-
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/ms_adapter/pytorch/nn/functional.py b/ms_adapter/pytorch/nn/functional.py
index 5b93412c..cad63ace 100644
--- a/ms_adapter/pytorch/nn/functional.py
+++ b/ms_adapter/pytorch/nn/functional.py
@@ -2144,7 +2144,10 @@ def layer_norm(inputs, normalized_shape, weight=None, bias=None, eps=1e-05):
         bias = cast_to_ms_tensor(bias)
     else:
         bias = ms.Tensor(np.zeros(normalized_shape), inputs.dtype)
-    assert inputs.shape[-len(normalized_shape):] == normalized_shape
+
+    if inputs.shape[-len(normalized_shape):] != normalized_shape:
+        raise ValueError("For layer_norm, normalized_shape must match the trailing dimensions of inputs' shape, "
+                         f"but got input_shape: {inputs.shape}, normalized_shape: {normalized_shape}")
     _layer_norm = ms.ops.LayerNorm(epsilon=eps)
     out = _layer_norm(inputs, weight, bias)
     return cast_to_adapter_tensor(out[0])
diff --git a/ms_adapter/pytorch/nn/modules/loss.py b/ms_adapter/pytorch/nn/modules/loss.py
index 7287090e..c4196496 100644
--- a/ms_adapter/pytorch/nn/modules/loss.py
+++ b/ms_adapter/pytorch/nn/modules/loss.py
@@ -203,7 +203,9 @@ class MultiMarginLoss(_WeightedLoss):
         super(MultiMarginLoss, self).__init__(weight, size_average, reduce, reduction)
         if p not in (1, 2):
             raise ValueError("only p == 1 and p == 2 supported")
-        assert weight is None or weight.dim() == 1
+        if weight is not None and weight.dim() != 1:
+            raise ValueError(f"For MultiMarginLoss, `weight` must be 1-D, but got {weight.dim()}-D.")
+
         self.p = p
         self.margin = margin

--
2.34.1
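
Note (not part of the patch itself): below is a minimal, self-contained sketch of the shape check introduced by the first hunk. It uses plain Python tuples instead of MindSpore tensors, so it runs without MindSpore or the ms_adapter package installed, and the helper name check_layer_norm_shapes is hypothetical. The point is that a trailing-dimension mismatch now surfaces as a ValueError with a descriptive message instead of a bare AssertionError, which would also be skipped entirely under python -O.

def check_layer_norm_shapes(input_shape, normalized_shape):
    # The last len(normalized_shape) dimensions of the input must match
    # normalized_shape exactly, mirroring torch.nn.functional.layer_norm.
    if input_shape[-len(normalized_shape):] != tuple(normalized_shape):
        raise ValueError(
            "For layer_norm, normalized_shape must match the trailing dimensions of inputs' shape, "
            f"but got input_shape: {input_shape}, normalized_shape: {tuple(normalized_shape)}"
        )

check_layer_norm_shapes((2, 3, 4), (3, 4))      # OK: trailing dims match
try:
    check_layer_norm_shapes((2, 3, 4), (5, 4))  # mismatch -> ValueError
except ValueError as err:
    print(err)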
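
Similarly for the second hunk, assuming only that weight.dim() returns the tensor rank (as it does for PyTorch and MSAdapter tensors); FakeWeight and check_multi_margin_weight are hypothetical stand-ins so the snippet runs without any tensor library.

class FakeWeight:
    """Hypothetical stand-in exposing only dim(), like a torch/ms_adapter tensor."""
    def __init__(self, ndim):
        self._ndim = ndim

    def dim(self):
        return self._ndim

def check_multi_margin_weight(weight):
    # `weight` is optional; when given it must be a 1-D class-weight vector.
    if weight is not None and weight.dim() != 1:
        raise ValueError(f"For MultiMarginLoss, `weight` must be 1-D, but got {weight.dim()}-D.")

check_multi_margin_weight(None)           # OK: weight may be omitted
check_multi_margin_weight(FakeWeight(1))  # OK: 1-D weight
try:
    check_multi_margin_weight(FakeWeight(2))  # 2-D weight -> ValueError
except ValueError as err:
    print(err)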