{ "SKU参数": { "厂商": "NVIDIA", "型号": "A100 PCIe", "用途": "Training/Inference", "照片": "vendor_zoo/NVIDIA/image_3.png" }, "硬件参数": { "制程(NM)": 7, "尺寸": "FHFL, Dual Slot Card", "接口": "PCIe 4.0x16", "功耗(W/TDP)": 300 }, "内存参数": { "内存层次架构图": null, "内存": { "内存类型": "HBM2e", "内存容量(GB)": 80, "内存带宽(GB/s)": 1935 }, "一级缓存": { "缓存类型": "Cache(x108)", "缓存容量(MB)": 20.736, "缓存带宽(TB/s)": null }, "二级缓存": { "缓存类型": null, "缓存容量(MB)": 40, "缓存带宽(TB/s)": null } }, "算力参数": { "PE层次架构图": null, "PE参数": { "算力架构": "同构众核", "并行方式": "SIMT", "通信带宽(GB/s)": null }, "标量参数": { "标量精度": null, "INT8标量算力(TOPS)": null, "FP16标量算力(TFLOPS)": null, "FP32标量算力(TFLOPS)": null }, "向量参数": { "向量精度": "FP64, FP32, TF32", "INT8量算力(TOPS)": null, "FP16向量算力(TFLOPS)": null, "FP32向量算力(TFLOPS)": 19.5, "FP64向量算力(TFLOPS)": 9.7 }, "张量参数": { "张量精度": "FP64, FP32, TF32, FP16, BF16, INT8", "INT8张量算力(TOPS)": 624.0, "BF16张量算力(TFLOPS)": 312.0, "FP16张量算力(TFLOPS)": 312.0, "TF32张量算力(TFLOPS)": 156.0, "FP32张量算力(TFLOPS)": null, "FP64张量算力(TFLOPS)": 19.5 } }, "卡间通信参数": { "通信方式": "NV-Link", "端口数量": null, "RDMA协议": null, "下行带宽(GB/s)": null, "上行带宽(GB/s)": null }, "数据源": [ "https://www.nvidia.com/content/dam/en-zz/Solutions/Data-Center/a100/pdf/nvidia-a100-datasheet-nvidia-us-2188504-web.pdf", "https://www.techpowerup.com/gpu-specs/a100-pcie-80-gb.c3821" ] }