From 37d14420f21496a118669aa5f01cd5c6881e1c0e Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:38:52 +0800
Subject: [PATCH 1/8] 1

---
 .../hstu_dense_forward/op_host/tiling_policy.cpp   |  2 +-
 .../hstu_dense/test_hstu_dense_forward_demo.py     | 15 +++++++++++++++
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
index 320487da..51b9e07d 100644
--- a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
+++ b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
@@ -31,7 +31,7 @@ ShapeRange::ShapeRange(int64_t lbound, int64_t ubound, int64_t mutiple, const ch
 bool ShapeRange::Check(int64_t val) const
 {
     OPS_CHECK((val < lbound || val > ubound || val % mutiple != 0),
-              OPS_LOG_E("%s must meet range[%lld %lld] and mutiple of [%lld]. but get value %lld\n", name, lbound,
+              OPS_LOG_E("", "%s must meet range[%lld %lld] and mutiple of [%lld]. but get value %lld\n", name, lbound,
                         ubound, mutiple, val),
               return false);
     return true;
diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 66039966..cc8f9c69 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -239,6 +239,21 @@ class TestHstuJaggedDemo:
     def test_hstu_dens_forward_2048bs(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
                                       data_type):
         self.execute(2048, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
+
+    @pytest.mark.parametrize("head_num", [255])
+    @pytest.mark.parametrize("max_seq_len", [16])
+    @pytest.mark.parametrize("head_dim", [256])
+    @pytest.mark.parametrize("enable_bias", [True])
+    @pytest.mark.parametrize("mask_type", [ mask_custom])
+    @pytest.mark.parametrize("silu_scale", [1 / 1024])
+    @pytest.mark.parametrize("data_type", [torch.bfloat16])
+    @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
+    def test_hstu_dens_forward_head_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
+                                 data_type):
+        with pytest.raises(RuntimeError) as e_info:
+            self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
+        assert "head num must meet range[2 8] and multiple of [2]. but get value 255" in str(e_info.value)
+
 
 
 class TestHstuNormalDemo:
-- 
Gitee

From 484b77ffb10d64c6e0dd55a51ff2f67fe25ff29b Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:40:35 +0800
Subject: [PATCH 2/8] 1

---
 .../torch_demo/hstu_dense/test_hstu_dense_forward_demo.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index cc8f9c69..96f1fdb8 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -255,7 +255,6 @@ class TestHstuJaggedDemo:
         assert "head num must meet range[2 8] and multiple of [2]. but get value 255" in str(e_info.value)
 
-
 
 class TestHstuNormalDemo:
     @staticmethod
     def gloden_op_exec(q, k, v, bias, mask, mask_type, max_seq_len, silu_scale, enable_bias, data_type):
-- 
Gitee

From b9093ebcf1f89a4b11522d55c6d6abeb8d3b4182 Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:41:26 +0800
Subject: [PATCH 3/8] 1

---
 .../torch_demo/hstu_dense/test_hstu_dense_forward_demo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 96f1fdb8..95f8750e 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -249,7 +249,7 @@ class TestHstuJaggedDemo:
     @pytest.mark.parametrize("data_type", [torch.bfloat16])
     @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
     def test_hstu_dens_forward_head_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
-                                 data_type):
+                                        data_type):
         with pytest.raises(RuntimeError) as e_info:
             self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
         assert "head num must meet range[2 8] and multiple of [2]. but get value 255" in str(e_info.value)
-- 
Gitee

From 122eb8c2ddc5b0fe9558bdd1cd2d9eeed45afbab Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:48:05 +0800
Subject: [PATCH 4/8] 1

---
 .../torch_demo/hstu_dense/test_hstu_dense_forward_demo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 95f8750e..51a195da 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -252,7 +252,7 @@ class TestHstuJaggedDemo:
                                         data_type):
         with pytest.raises(RuntimeError) as e_info:
             self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
-        assert "head num must meet range[2 8] and multiple of [2]. but get value 255" in str(e_info.value)
+        assert "head num must meet range[2 8] and mutiple of [2]. but get value 255" in str(e_info.value)
 
 
 class TestHstuNormalDemo:
-- 
Gitee

From 812262e4790a724678519c46fd1963bfa538d6de Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:51:15 +0800
Subject: [PATCH 5/8] 1

---
 .../hstu_dense_forward/op_host/tiling_policy.cpp   |  2 +-
 .../hstu_dense/test_hstu_dense_forward_demo.py     | 14 ++++++++++++++
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
index 51b9e07d..66b7bbd3 100644
--- a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
+++ b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
@@ -21,7 +21,7 @@ See the License for the specific language governing permissions and
 namespace HstuDenseForward {
 
 ShapeRange::ShapeRange(int64_t lbound, int64_t ubound, int64_t mutiple, const char *name)
-{
+{mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
     this->lbound = lbound;
     this->ubound = ubound;
     this->mutiple = mutiple;
diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 51a195da..922e4d97 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -253,6 +253,20 @@ class TestHstuJaggedDemo:
         with pytest.raises(RuntimeError) as e_info:
             self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
         assert "head num must meet range[2 8] and mutiple of [2]. but get value 255" in str(e_info.value)
+
+    @pytest.mark.parametrize("head_num", [2])
+    @pytest.mark.parametrize("max_seq_len", [16])
+    @pytest.mark.parametrize("head_dim", [255])
+    @pytest.mark.parametrize("enable_bias", [True])
+    @pytest.mark.parametrize("mask_type", [ mask_custom])
+    @pytest.mark.parametrize("silu_scale", [1 / 1024])
+    @pytest.mark.parametrize("data_type", [torch.bfloat16])
+    @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
+    def test_hstu_dens_forward_head_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
+                                        data_type):
+        with pytest.raises(RuntimeError) as e_info:
+            self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
+        assert "dim size must meet range[16 512] and mutiple of [16]. but get value 255" in str(e_info.value)
 
 
 class TestHstuNormalDemo:
-- 
Gitee

From 606dc43576200db868a5ea36286d951851a3c028 Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 09:51:42 +0800
Subject: [PATCH 6/8] 1

---
 .../torch_demo/hstu_dense/test_hstu_dense_forward_demo.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 922e4d97..75ff9cd5 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -248,7 +248,7 @@ class TestHstuJaggedDemo:
     @pytest.mark.parametrize("silu_scale", [1 / 1024])
     @pytest.mark.parametrize("data_type", [torch.bfloat16])
     @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
-    def test_hstu_dens_forward_head_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
+    def test_hstu_dens_forward_head_num_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
                                         data_type):
         with pytest.raises(RuntimeError) as e_info:
             self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
@@ -262,7 +262,7 @@ class TestHstuJaggedDemo:
     @pytest.mark.parametrize("silu_scale", [1 / 1024])
     @pytest.mark.parametrize("data_type", [torch.bfloat16])
     @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
-    def test_hstu_dens_forward_head_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
+    def test_hstu_dens_forward_head_dim_255(self, head_num, max_seq_len, head_dim, enable_bias, mask_type, silu_scale,
                                         data_type):
         with pytest.raises(RuntimeError) as e_info:
             self.execute(20, max_seq_len, head_num, head_dim, enable_bias, mask_type, silu_scale, data_type)
-- 
Gitee

From 6a5a3b40b465e540aa1b9402f4f9789c31b11206 Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 10:10:32 +0800
Subject: [PATCH 7/8] 1

---
 .../operators/hstu_dense_forward/op_host/tiling_policy.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
index 66b7bbd3..51b9e07d 100644
--- a/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
+++ b/mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
@@ -21,7 +21,7 @@ See the License for the specific language governing permissions and
 namespace HstuDenseForward {
 
 ShapeRange::ShapeRange(int64_t lbound, int64_t ubound, int64_t mutiple, const char *name)
-{mxrec_add_ons/rec_for_torch/operators/hstu_dense_forward/op_host/tiling_policy.cpp
+{
     this->lbound = lbound;
     this->ubound = ubound;
     this->mutiple = mutiple;
-- 
Gitee

From 377da2b07d58842abfb9a0c2e88199b861af5df8 Mon Sep 17 00:00:00 2001
From: zxorange_321
Date: Mon, 11 Aug 2025 10:13:57 +0800
Subject: [PATCH 8/8] 1

---
 .../torch_demo/hstu_dense/test_hstu_dense_forward_demo.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
index 75ff9cd5..194d6d29 100644
--- a/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
+++ b/mxrec_add_ons/rec_for_torch/torch_plugin/torch_demo/hstu_dense/test_hstu_dense_forward_demo.py
@@ -244,7 +244,7 @@ class TestHstuJaggedDemo:
     @pytest.mark.parametrize("max_seq_len", [16])
     @pytest.mark.parametrize("head_dim", [256])
     @pytest.mark.parametrize("enable_bias", [True])
-    @pytest.mark.parametrize("mask_type", [ mask_custom])
+    @pytest.mark.parametrize("mask_type", [mask_custom])
     @pytest.mark.parametrize("silu_scale", [1 / 1024])
     @pytest.mark.parametrize("data_type", [torch.bfloat16])
     @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
@@ -258,7 +258,7 @@ class TestHstuJaggedDemo:
     @pytest.mark.parametrize("max_seq_len", [16])
     @pytest.mark.parametrize("head_dim", [255])
     @pytest.mark.parametrize("enable_bias", [True])
-    @pytest.mark.parametrize("mask_type", [ mask_custom])
+    @pytest.mark.parametrize("mask_type", [mask_custom])
     @pytest.mark.parametrize("silu_scale", [1 / 1024])
     @pytest.mark.parametrize("data_type", [torch.bfloat16])
     @pytest.mark.skipif(get_chip(), reason="This test case is Skipped for Ascend310P.")
-- 
Gitee