diff --git a/paddle/fluid/pir/dialect/op_generator/cache_grad_op_symbol_shape_gen.py b/paddle/fluid/pir/dialect/op_generator/cache_grad_op_symbol_shape_gen.py
index 521c2adaf8479..7e17ecd4c96ea 100644
--- a/paddle/fluid/pir/dialect/op_generator/cache_grad_op_symbol_shape_gen.py
+++ b/paddle/fluid/pir/dialect/op_generator/cache_grad_op_symbol_shape_gen.py
@@ -90,7 +90,7 @@
 
 cache_grad_op_shape_black_list = {"fused_attention"}
 
-manual_grad_op_sym_infer_list = {"fuesd_attention"}
+manual_grad_op_sym_infer_list = {"fused_attention"}
 
 
 class CacheGradOpSymbolShapeCodeGen:
diff --git a/paddle/pir/src/dialect/shape/utils/shape_analysis.cc b/paddle/pir/src/dialect/shape/utils/shape_analysis.cc
index 5cc5ed69888e6..8396dc764278e 100644
--- a/paddle/pir/src/dialect/shape/utils/shape_analysis.cc
+++ b/paddle/pir/src/dialect/shape/utils/shape_analysis.cc
@@ -149,7 +149,7 @@ void InferSymbolicShapeContext::SetSymbolForValueByStaticShape(Value val) {
   const auto& GetStaticShapeForDenseTensorType =
       [&](DenseTensorType type_info) -> symbol::TensorShapeOrDataDimExprs {
     std::vector<symbol::DimExpr> static_shape;
-    for (int i = 0; i < type_info.dims().size(); ++i) {
+    for (int i = 0; i < common::vectorize(type_info.dims()).size(); ++i) {
      int dim = type_info.dims()[i];
       if (dim > 0) {
         static_shape.emplace_back(dim);
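
Note on the second hunk: the loop bound switches from type_info.dims().size() to common::vectorize(type_info.dims()).size(), so the iteration count is taken from the vectorized dim list rather than from the DDim itself. The sketch below only illustrates that loop pattern; DDim, vectorize, and the string stand-in for symbol::DimExpr are all simplified mocks, not Paddle's real API.

// Standalone sketch of the patched loop, with hypothetical stand-ins for
// Paddle's common::DDim / common::vectorize / symbol::DimExpr.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for common::DDim.
struct DDim {
  std::vector<int64_t> d;
  int64_t operator[](int i) const { return d[i]; }
};

// Hypothetical stand-in for common::vectorize: the DDim as a plain vector.
std::vector<int64_t> vectorize(const DDim& dims) { return dims.d; }

int main() {
  DDim dims{{4, -1, 16}};  // -1 marks a dynamic (unknown) extent
  std::vector<std::string> static_shape;
  // Mirrors the patched bound: size the loop from the vectorized dims.
  for (size_t i = 0; i < vectorize(dims).size(); ++i) {
    const int64_t dim = dims[static_cast<int>(i)];
    // Positive extents become concrete dims; others stay symbolic,
    // analogous to the `dim > 0` branch in the real lambda.
    static_shape.emplace_back(dim > 0 ? std::to_string(dim)
                                      : "S" + std::to_string(i));
  }
  for (const auto& s : static_shape) std::cout << s << ' ';  // prints: 4 S1 16
  std::cout << '\n';
  return 0;
}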