Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feature gap in sparsity #1125

Open
daisyden opened this issue Nov 28, 2024 · 0 comments
Open

feature gap in sparsity #1125

daisyden opened this issue Nov 28, 2024 · 0 comments
Assignees
Milestone

Comments

@daisyden
Copy link
Contributor

🚀 The feature, motivation and pitch

We have a list of unit tests on sparsity that need to be enabled:
"test_dtypes_sparse_sampled_addmm_xpu",
"test_compare_cpu_sparse_sampled_addmm_xpu_float32",
"test_errors_sparse_mul_layout0_xpu",
"test_errors_sparse_mul_layout1_xpu",
"test_errors_sparse_mul_layout2_xpu",
"test_errors_sparse_mul_layout3_xpu",
"test_out_requires_grad_error_sparse_sampled_addmm_xpu_complex64",
"test_out_requires_grad_error_sparse_sampled_addmm_xpu_float32",
# NotImplementedError: Could not run 'aten::_to_sparse_csr' with arguments from the 'SparseXPU' backend.
"test_mask_layout_sparse_coo_masked_amax_xpu_bfloat16",
"test_mask_layout_sparse_coo_masked_amax_xpu_float16",
"test_mask_layout_sparse_coo_masked_amax_xpu_float32",
"test_mask_layout_sparse_coo_masked_amax_xpu_float64",
"test_mask_layout_sparse_coo_masked_amin_xpu_bfloat16",
"test_mask_layout_sparse_coo_masked_amin_xpu_float16",
"test_mask_layout_sparse_coo_masked_amin_xpu_float32",
"test_mask_layout_sparse_coo_masked_amin_xpu_float64",
"test_mask_layout_sparse_coo_masked_prod_xpu_bfloat16",
"test_mask_layout_sparse_coo_masked_prod_xpu_bool",
"test_mask_layout_sparse_coo_masked_prod_xpu_complex128",
"test_mask_layout_sparse_coo_masked_prod_xpu_complex64",
"test_mask_layout_sparse_coo_masked_prod_xpu_float16",
"test_mask_layout_sparse_coo_masked_prod_xpu_float32",
"test_mask_layout_sparse_coo_masked_prod_xpu_float64",
"test_mask_layout_sparse_coo_masked_prod_xpu_int16",
"test_mask_layout_sparse_coo_masked_prod_xpu_int32",
"test_mask_layout_sparse_coo_masked_prod_xpu_int64",
"test_mask_layout_sparse_coo_masked_prod_xpu_int8",
"test_mask_layout_sparse_coo_masked_prod_xpu_uint8",
# NotImplementedError: Could not run 'aten::_to_sparse_csr' with arguments from the 'SparseXPU' backend.
"test_mask_layout_sparse_coo_masked_sum_xpu_bfloat16",
"test_mask_layout_sparse_coo_masked_sum_xpu_bool",
"test_mask_layout_sparse_coo_masked_sum_xpu_complex128",
"test_mask_layout_sparse_coo_masked_sum_xpu_complex64",
"test_mask_layout_sparse_coo_masked_sum_xpu_float16",
"test_mask_layout_sparse_coo_masked_sum_xpu_float32",
"test_mask_layout_sparse_coo_masked_sum_xpu_float64",
"test_mask_layout_sparse_coo_masked_sum_xpu_int16",
"test_mask_layout_sparse_coo_masked_sum_xpu_int32",
"test_mask_layout_sparse_coo_masked_sum_xpu_int64",
"test_mask_layout_sparse_coo_masked_sum_xpu_int8",
"test_mask_layout_sparse_coo_masked_sum_xpu_uint8",
"test_mask_layout_sparse_csr_masked_amax_xpu_bfloat16",
"test_mask_layout_sparse_csr_masked_amax_xpu_float16",
"test_mask_layout_sparse_csr_masked_amax_xpu_float32",
"test_mask_layout_sparse_csr_masked_amax_xpu_float64",
"test_mask_layout_sparse_csr_masked_amin_xpu_bfloat16",
"test_mask_layout_sparse_csr_masked_amin_xpu_float16",
"test_mask_layout_sparse_csr_masked_amin_xpu_float32",
"test_mask_layout_sparse_csr_masked_amin_xpu_float64",
"test_mask_layout_sparse_csr_masked_mean_xpu_bfloat16",
"test_mask_layout_sparse_csr_masked_mean_xpu_float16",
"test_mask_layout_sparse_csr_masked_mean_xpu_float32",
"test_mask_layout_sparse_csr_masked_mean_xpu_float64",
"test_mask_layout_sparse_csr_masked_prod_xpu_bfloat16",
"test_mask_layout_sparse_csr_masked_prod_xpu_bool",
"test_mask_layout_sparse_csr_masked_prod_xpu_complex128",
"test_mask_layout_sparse_csr_masked_prod_xpu_complex64",
"test_mask_layout_sparse_csr_masked_prod_xpu_float16",
"test_mask_layout_sparse_csr_masked_prod_xpu_float32",
"test_mask_layout_sparse_csr_masked_prod_xpu_float64",
"test_mask_layout_sparse_csr_masked_prod_xpu_int16",
"test_mask_layout_sparse_csr_masked_prod_xpu_int32",
"test_mask_layout_sparse_csr_masked_prod_xpu_int64",
"test_mask_layout_sparse_csr_masked_prod_xpu_int8",
"test_mask_layout_sparse_csr_masked_prod_xpu_uint8",
"test_mask_layout_sparse_csr_masked_sum_xpu_bfloat16",
"test_mask_layout_sparse_csr_masked_sum_xpu_bool",
"test_mask_layout_sparse_csr_masked_sum_xpu_complex128",
"test_mask_layout_sparse_csr_masked_sum_xpu_complex64",
"test_mask_layout_sparse_csr_masked_sum_xpu_float16",
"test_mask_layout_sparse_csr_masked_sum_xpu_float32",
"test_mask_layout_sparse_csr_masked_sum_xpu_float64",
"test_mask_layout_sparse_csr_masked_sum_xpu_int16",
"test_mask_layout_sparse_csr_masked_sum_xpu_int32",
"test_mask_layout_sparse_csr_masked_sum_xpu_int64",
"test_mask_layout_sparse_csr_masked_sum_xpu_int8",
"test_mask_layout_sparse_csr_masked_sum_xpu_uint8",
# NotImplementedError: Could not run 'aten::_to_sparse_csr' with arguments from the 'SparseXPU' backend.
"test_invalid_sparse_layout_xpu",
"test_to_dense_and_sparse_csr_xpu",
"test_sparse_xpu.py": (
"test_log_softmax_zero_nnz_xpu_float32", # - NotImplementedError: Could not run 'aten::_sparse_log_softmax' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this ...
"test_log_softmax_zero_nnz_xpu_float64", # - NotImplementedError: Could not run 'aten::_sparse_log_softmax' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this ...
"test_softmax_xpu_float64", # - NotImplementedError: Could not run 'aten::_sparse_softmax' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this back...
"test_softmax_zero_nnz_xpu_float32", # - NotImplementedError: Could not run 'aten::_sparse_softmax' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this back...
"test_softmax_zero_nnz_xpu_float64", # - NotImplementedError: Could not run 'aten::_sparse_softmax' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this back...
"test_sparse_addmm_xpu_bfloat16", # - NotImplementedError: Could not run 'aten::addmm' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this backend, or wa...
"test_sparse_addmm_xpu_complex128", # - NotImplementedError: Could not run 'aten::addmm' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this backend, or wa...
"test_sparse_addmm_xpu_float16", # - NotImplementedError: Could not run 'aten::addmm' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this backend, or wa...
"test_sparse_addmm_xpu_float64", # - NotImplementedError: Could not run 'aten::addmm' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this backend, or wa...
"test_sparse_matmul_xpu_complex128", # - RuntimeError: Double and complex datatype matmul is not supported in oneDNN
"test_sparse_matmul_xpu_complex64", # - RuntimeError: Double and complex datatype matmul is not supported in oneDNN
"test_sparse_matmul_xpu_float32", # - NotImplementedError: Could not run 'aten::_sparse_sparse_matmul' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for thi...
"test_sparse_matmul_xpu_float64", # - RuntimeError: Double and complex datatype matmul is not supported in oneDNN
"test_sparse_mm_xpu_float64", # - NotImplementedError: Could not run 'aten::addmm' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this backend, or wa...
"test_sparse_sum_xpu_float64", # - NotImplementedError: Could not run 'aten::_sparse_sum_backward' with arguments from the 'SparseXPU' backend. This could be because the operator doesn't exist for this...

Alternatives

No response

Additional context

No response

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants