Comment out tosa/stablehlo/linalg default test with torchscript
- Fixes the e2e-test failure where "from torch_mlir import torchscript"
  raises: No module named 'torch_mlir.jit_ir_importer'
AmosLewis committed Aug 21, 2024
1 parent 6b27eda commit ddf555b
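
For context, a minimal reproduction sketch of the failure described above (a sketch only, assuming a torch-mlir build that does not ship the optional JIT IR importer; the module and error text come from the commit message):

    # Importing the TorchScript e2e path pulls in torch_mlir.jit_ir_importer,
    # so on a build without that component the import fails immediately.
    from torch_mlir import torchscript
    # -> ModuleNotFoundError: No module named 'torch_mlir.jit_ir_importer'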
Showing 2 changed files with 20 additions and 26 deletions.
43 changes: 20 additions & 23 deletions projects/pt1/e2e_testing/main.py
@@ -19,12 +19,9 @@
 # Available test configs.
 from torch_mlir_e2e_test.configs import (
     LazyTensorCoreTestConfig,
-    LinalgOnTensorsBackendTestConfig,
-    StablehloBackendTestConfig,
     NativeTorchTestConfig,
     OnnxBackendTestConfig,
     TorchScriptTestConfig,
-    TosaBackendTestConfig,
     TorchDynamoTestConfig,
     FxImporterTestConfig,
 )
@@ -92,9 +89,9 @@ def _get_argparse():
         default="linalg",
         help=f"""
 Meaning of options:
-"linalg": run through torch-mlir"s default Linalg-on-Tensors backend.
-"tosa": run through torch-mlir"s default TOSA backend.
-"stablehlo": run through torch-mlir"s default Stablehlo backend.
+# "linalg": run through torch-mlir"s default Linalg-on-Tensors backend.
+# "tosa": run through torch-mlir"s default TOSA backend.
+# "stablehlo": run through torch-mlir"s default Stablehlo backend.
 "native_torch": run the torch.nn.Module as-is without compiling (useful for verifying model is deterministic; ALL tests should pass in this configuration).
 "torchscript": compile the model to a torch.jit.ScriptModule, and then run that as-is (useful for verifying TorchScript is modeling the program correctly).
 "lazy_tensor_core": run the model through the Lazy Tensor Core frontend and execute the traced graph.
@@ -152,23 +149,23 @@ def main():
     all_test_unique_names = set(test.unique_name for test in GLOBAL_TEST_REGISTRY)
 
     # Find the selected config.
-    if args.config == "linalg":
-        config = LinalgOnTensorsBackendTestConfig(RefBackendLinalgOnTensorsBackend())
-        xfail_set = LINALG_XFAIL_SET
-        crashing_set = LINALG_CRASHING_SET
-    elif args.config == "stablehlo":
-        config = StablehloBackendTestConfig(LinalgOnTensorsStablehloBackend())
-        xfail_set = all_test_unique_names - STABLEHLO_PASS_SET
-        crashing_set = STABLEHLO_CRASHING_SET
-    elif args.config == "tosa":
-        config = TosaBackendTestConfig(LinalgOnTensorsTosaBackend())
-        xfail_set = all_test_unique_names - TOSA_PASS_SET
-        crashing_set = TOSA_CRASHING_SET
-    elif args.config == "make_fx_tosa":
-        config = TosaBackendTestConfig(LinalgOnTensorsTosaBackend(), use_make_fx=True)
-        xfail_set = all_test_unique_names - MAKE_FX_TOSA_PASS_SET
-        crashing_set = MAKE_FX_TOSA_CRASHING_SET
-    elif args.config == "native_torch":
+    # if args.config == "linalg":
+    #     config = LinalgOnTensorsBackendTestConfig(RefBackendLinalgOnTensorsBackend())
+    #     xfail_set = LINALG_XFAIL_SET
+    #     crashing_set = LINALG_CRASHING_SET
+    # elif args.config == "stablehlo":
+    #     config = StablehloBackendTestConfig(LinalgOnTensorsStablehloBackend())
+    #     xfail_set = all_test_unique_names - STABLEHLO_PASS_SET
+    #     crashing_set = STABLEHLO_CRASHING_SET
+    # elif args.config == "tosa":
+    #     config = TosaBackendTestConfig(LinalgOnTensorsTosaBackend())
+    #     xfail_set = all_test_unique_names - TOSA_PASS_SET
+    #     crashing_set = TOSA_CRASHING_SET
+    # elif args.config == "make_fx_tosa":
+    #     config = TosaBackendTestConfig(LinalgOnTensorsTosaBackend(), use_make_fx=True)
+    #     xfail_set = all_test_unique_names - MAKE_FX_TOSA_PASS_SET
+    #     crashing_set = MAKE_FX_TOSA_CRASHING_SET
+    if args.config == "native_torch":
         config = NativeTorchTestConfig()
         xfail_set = set()
         crashing_set = set()
3 changes: 0 additions & 3 deletions projects/pt1/python/torch_mlir_e2e_test/configs/__init__.py
@@ -4,11 +4,8 @@
 # Also available under a BSD-style license. See LICENSE.
 
 from .lazy_tensor_core import LazyTensorCoreTestConfig
-from .linalg_on_tensors_backend import LinalgOnTensorsBackendTestConfig
 from .native_torch import NativeTorchTestConfig
 from .onnx_backend import OnnxBackendTestConfig
 from .torchscript import TorchScriptTestConfig
-from .stablehlo_backend import StablehloBackendTestConfig
-from .tosa_backend import TosaBackendTestConfig
 from .torchdynamo import TorchDynamoTestConfig
 from .fx_importer_backend import FxImporterTestConfig
