"""Smoke-test loading a set of exported ESPnet ONNX models.

For each model path, try to create an onnxruntime InferenceSession and
print its input names and shapes; failures are reported per-model so the
remaining models are still checked.
"""
import onnxruntime as ort
import numpy as np  # NOTE(review): unused in this script — confirm before removing

# Paths of the exported models to verify.
# All files live under /root/.cache/espnet_onnx/transformer_lm/full
models = {
    "encoder": "/root/.cache/espnet_onnx/transformer_lm/full/default_encoder.onnx",
    "decoder": "/root/.cache/espnet_onnx/transformer_lm/full/xformer_decoder.onnx",
    "ctc": "/root/.cache/espnet_onnx/transformer_lm/full/ctc.onnx",
    "lm": "/root/.cache/espnet_onnx/transformer_lm/full/transformer_lm.onnx",
}


def main() -> None:
    """Attempt to load every model and print its input signature."""
    for name, path in models.items():
        try:
            # Creating the session parses and validates the ONNX file.
            session = ort.InferenceSession(path)
            # Input metadata: names and (possibly symbolic) shapes.
            inputs = session.get_inputs()
            print(f"\n✅ {name} 模型加载成功")
            print(f"  输入: {[i.name for i in inputs]}")
            print(f"  形状: {[i.shape for i in inputs]}")
        except Exception as e:  # broad on purpose: keep checking the other models
            print(f"\n❌ {name} 模型加载失败: {e}")


if __name__ == "__main__":
    main()