# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import argparse
import logging
import pathlib

# need this before the mobile helper imports for some reason
logging.basicConfig(format="%(levelname)s: %(message)s")

from .mobile_helpers import check_model_can_use_ort_mobile_pkg, usability_checker  # noqa


def check_usability():
    parser = argparse.ArgumentParser(
        description="""Analyze an ONNX model to determine how well it will work in mobile scenarios, and whether
        it is likely to be able to use the pre-built ONNX Runtime Mobile Android or iOS package.""",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "--config_path",
        help="Path to required operators and types configuration used to build the pre-built ORT mobile package.",
        required=False,
        type=pathlib.Path,
        default=check_model_can_use_ort_mobile_pkg.get_default_config_path(),
    )
    parser.add_argument(
        "--log_level", choices=["debug", "info", "warning", "error"], default="info", help="Logging level"
    )
    parser.add_argument("model_path", help="Path to ONNX model to check", type=pathlib.Path)

    args = parser.parse_args()
    logger = logging.getLogger("check_usability")

    if args.log_level == "debug":
        logger.setLevel(logging.DEBUG)
    elif args.log_level == "info":
        logger.setLevel(logging.INFO)
    elif args.log_level == "warning":
        logger.setLevel(logging.WARNING)
    else:
        logger.setLevel(logging.ERROR)

    try_eps = usability_checker.analyze_model(args.model_path, skip_optimize=False, logger=logger)
    check_model_can_use_ort_mobile_pkg.run_check(args.model_path, args.config_path, logger)

    logger.info(
        "Run `python -m onnxruntime.tools.convert_onnx_models_to_ort ...` to convert the ONNX model to ORT "
        "format. "
        "By default, the conversion tool will create an ORT format model with saved optimizations which can "
        "potentially be applied at runtime (with a .with_runtime_opt.ort file extension) for use with NNAPI "
        "or CoreML, and a fully optimized ORT format model (with a .ort file extension) for use with the CPU "
        "EP."
    )

    if try_eps:
        logger.info(
            "As NNAPI or CoreML may provide benefits with this model it is recommended to compare the "
            "performance of the <model>.with_runtime_opt.ort model using the NNAPI EP on Android, and the "
            "CoreML EP on iOS, against the performance of the <model>.ort model using the CPU EP."
        )
    else:
        logger.info("For optimal performance the <model>.ort model should be used with the CPU EP.")


if __name__ == "__main__":
    check_usability()
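
# Example invocation (a minimal sketch): the flags below come from the argparse
# definitions above, but the module path is an assumption. In the ONNX Runtime
# Python package the equivalent checker is usually exposed as
# onnxruntime.tools.check_onnx_model_mobile_usability; adjust the path to match
# wherever this file is packaged.
#
#   python -m onnxruntime.tools.check_onnx_model_mobile_usability --log_level debug /path/to/model.onnx
#
# --config_path is optional; when omitted, the required operators/types
# configuration for the pre-built ORT mobile package is loaded from
# check_model_can_use_ort_mobile_pkg.get_default_config_path().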