
Commit d5240bf

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into ApiEnhance6
2 parents db3b1fb + 519aee5

230 files changed: 7953 additions, 2560 deletions

.github/workflows/rerun.yml

Lines changed: 30 additions & 0 deletions
@@ -296,3 +296,33 @@ jobs:
           OWNER: ${{ github.repository_owner }}
           REPO: ${{ github.event.repository.name }}
           JOB_NAME: 'Slice / Check bypass / Check bypass'
+
+      - name: Rerun Windows-GPU
+        if: ${{ contains(github.event.comment.body, 'win') && contains(github.event.comment.body, 'gpu') }}
+        uses: ./.github/actions/rerun-workflow
+        with:
+          PR_ID: ${{ github.event.issue.number }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO: ${{ github.event.repository.name }}
+          JOB_NAME: 'Windows-GPU / Check bypass / Check bypass'
+
+      - name: Rerun Windows-Inference
+        if: ${{ contains(github.event.comment.body, 'win') && contains(github.event.comment.body, 'infer') }}
+        uses: ./.github/actions/rerun-workflow
+        with:
+          PR_ID: ${{ github.event.issue.number }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO: ${{ github.event.repository.name }}
+          JOB_NAME: 'Windows-Inference / Check bypass / Check bypass'
+
+      - name: Rerun Windows-OPENBLAS
+        if: ${{ contains(github.event.comment.body, 'win') && contains(github.event.comment.body, 'openblas') }}
+        uses: ./.github/actions/rerun-workflow
+        with:
+          PR_ID: ${{ github.event.issue.number }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          OWNER: ${{ github.repository_owner }}
+          REPO: ${{ github.event.repository.name }}
+          JOB_NAME: 'Windows-OPENBLAS / Check bypass / Check bypass'
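
The new steps key off plain substring checks on the triggering comment, so any comment containing both keywords reruns the matching workflow (contains() in GitHub Actions expressions is case-insensitive). A minimal Python sketch of that dispatch logic; the job names are taken from the diff above, while the helper and rule table are purely hypothetical:

```python
# Sketch of the comment-keyword dispatch encoded by the `if:` conditions
# above. The helper and RERUN_RULES table are hypothetical; the real
# routing is done step-by-step by the contains() expressions in the YAML.
RERUN_RULES = {
    ("win", "gpu"): "Windows-GPU / Check bypass / Check bypass",
    ("win", "infer"): "Windows-Inference / Check bypass / Check bypass",
    ("win", "openblas"): "Windows-OPENBLAS / Check bypass / Check bypass",
}

def jobs_to_rerun(comment_body: str) -> list[str]:
    """Return the JOB_NAMEs whose keywords all occur in the comment body."""
    body = comment_body.lower()  # contains() is case-insensitive
    return [job for keys, job in RERUN_RULES.items()
            if all(k in body for k in keys)]

# A comment mentioning both "win" and "gpu" reruns only Windows-GPU:
assert jobs_to_rerun("rerun win gpu") == [
    "Windows-GPU / Check bypass / Check bypass"
]
```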

_typos.toml

Lines changed: 0 additions & 10 deletions
@@ -49,13 +49,3 @@ UNEXPECT = 'UNEXPECT'
 tood = 'tood'
 unpacket = "unpacket"
 vaccum = 'vaccum'
-
-# need to fix
-Detecter = 'Detecter'
-dismatch = 'dismatch'
-distributer = 'distributer'
-duoble = 'duoble'
-Grap = 'Grap'
-Simplied = 'Simplied'
-precess = 'precess'
-sittin = 'sittin'

ci/h-test.sh

Lines changed: 4 additions & 1 deletion
@@ -161,7 +161,10 @@ concurrency_list="^test_fp8_deep_gemm$|\
 ^test_dist_fuse_gemm_epilogue_pass$|\
 ^test_fuse_allreduce_split_to_reducescatter_pass$|\
 ^test_ps_server_pass$|\
-^test_white_lists$"
+^test_white_lists$|\
+^test_scaled_dot_product_attention$|\
+^test_compat_scaled_dot_product_attention$|\
+^test_flash_attention$"
 
 cd ${work_dir}/build
 tmp_dir=`mktemp -d`
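
Every entry in concurrency_list is anchored with ^...$ and joined with |, so a test is selected only on an exact name match, never as a substring of a longer test name. A quick Python check of that behavior (names taken from the additions above):

```python
import re

# Same shape as the shell variable: fully anchored names joined with '|'.
concurrency_list = "|".join([
    "^test_scaled_dot_product_attention$",
    "^test_compat_scaled_dot_product_attention$",
    "^test_flash_attention$",
])
pattern = re.compile(concurrency_list)

assert pattern.search("test_flash_attention")            # exact name matches
assert not pattern.search("test_flash_attention_extra")  # superstrings do not
```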

ci/rule-tests/no-deeply-nested-loops-test.yml

Lines changed: 1 addition & 1 deletion
@@ -169,7 +169,7 @@ valid:
           for e in range(10):
             consume(a, b, c, d, e)
 
-  # 5 levels breaked by extra code - OK
+  # 5 levels broken by extra code - OK
   - |
     for i in range(10):
       for j in range(10):

paddle/cinn/optim/eliminate_common_factor_of_local_index.cc

Lines changed: 8 additions & 8 deletions
@@ -166,9 +166,9 @@ CollectLocalVarToIndexes(ir::stmt::BlockRef func_body) {
 }
 
 int ExtractMulNumberFromExpr(const ir::Expr& expr) {
-  ir::Expr simplied_expr = optim::ArithSimplify(expr);
-  if (simplied_expr.is_constant()) {
-    return static_cast<int>(simplied_expr.get_constant());
+  ir::Expr simplified_expr = optim::ArithSimplify(expr);
+  if (simplified_expr.is_constant()) {
+    return static_cast<int>(simplified_expr.get_constant());
   } else if (expr.As<ir::Mul>()) {
     auto mul = expr.As<ir::Mul>();
     return ExtractMulNumberFromExpr(mul->a()) *
@@ -181,9 +181,9 @@ int ExtractMulNumberFromExpr(const ir::Expr& expr) {
 }
 
 int ExtractAddNumberFromExpr(const ir::Expr& expr) {
-  ir::Expr simplied_expr = optim::ArithSimplify(expr);
-  if (simplied_expr.is_constant()) {
-    return static_cast<int>(simplied_expr.get_constant());
+  ir::Expr simplified_expr = optim::ArithSimplify(expr);
+  if (simplified_expr.is_constant()) {
+    return static_cast<int>(simplified_expr.get_constant());
   } else if (expr.As<ir::Add>()) {
     auto add = expr.As<ir::Add>();
     return ExtractAddNumberFromExpr(add->a()) +
@@ -203,8 +203,8 @@ int gcd(int a, int b) {
 }
 
 ir::Expr ExtractSymbolicFromExpr(const ir::Expr& expr) {
-  ir::Expr simplied_expr = optim::ArithSimplify(expr);
-  if (simplied_expr.is_constant()) {
+  ir::Expr simplified_expr = optim::ArithSimplify(expr);
+  if (simplified_expr.is_constant()) {
     return ir::Expr(0);
   } else if (expr.As<ir::_Var_>()) {
     auto var = expr.As<ir::_Var_>();
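
Beyond the simplied → simplified rename, these helpers recursively fold the constant part out of an index expression: constant factors multiply across Mul nodes and constant terms sum across Add nodes. A rough Python analogue of ExtractMulNumberFromExpr over a toy expression type; the real code walks CINN's ir::Expr, and the neutral fallback of 1 for non-constant leaves is an assumption not shown in the hunk:

```python
from dataclasses import dataclass

# Toy stand-ins for CINN's ir::Expr nodes (assumption, for illustration only).
@dataclass
class Const:
    value: int

@dataclass
class Var:
    name: str

@dataclass
class Mul:
    a: object
    b: object

def extract_mul_number(expr) -> int:
    """Analogue of ExtractMulNumberFromExpr: product of constant factors."""
    if isinstance(expr, Const):
        return expr.value  # mirrors the is_constant() branch
    if isinstance(expr, Mul):
        return extract_mul_number(expr.a) * extract_mul_number(expr.b)
    return 1  # assumed neutral factor for non-constant leaves

# 4 * (i * 2) carries the constant factor 8.
assert extract_mul_number(Mul(Const(4), Mul(Var("i"), Const(2)))) == 8
```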

paddle/fluid/distributed/collective/reducer.cc

Lines changed: 1 addition & 1 deletion
@@ -781,7 +781,7 @@ void EagerReducer::AddDistHook(size_t var_index) {
   }
 
   VLOG(3) << "Tensor[" << var_index << "] [" << tensors_[var_index].name()
-          << "@Grad] arrived and triggered disthook";
+          << "@GRAD] arrived and triggered DistHook";
 
   local_used_vars_[var_index] = 1;
 
paddle/fluid/eager/auto_code_generator/generator/eager_gen.py

Lines changed: 1 addition & 0 deletions
@@ -93,6 +93,7 @@
     "acos_double_grad",
     "put_along_axis_double_grad",
     "masked_fill_double_grad",
+    "index_elementwise_put_with_tensor_double_grad",
 ]
 
 # white ops list whose kernel can automatically do type promotion.

paddle/fluid/framework/ir/coalesce_grad_tensor_pass.cc

Lines changed: 5 additions & 5 deletions
@@ -157,25 +157,25 @@ class CoalesceGradTensorPass : public ir::Pass {
     PADDLE_ENFORCE_EQ(iter != vars_info.end(),
                       true,
                       common::errors::NotFound(
-                          "Parameter@Grad %s is not found.", p_g.second));
+                          "Parameter@GRAD %s is not found.", p_g.second));
     PADDLE_ENFORCE_EQ(
         !iter->second.empty(),
         true,
         common::errors::InvalidArgument(
-            "Parameter@Grad %s's var node is empty.", p_g.second));
+            "Parameter@GRAD %s's var node is empty.", p_g.second));
     for (auto it : iter->second) {
       PADDLE_ENFORCE_NOT_NULL(
           it->Var(),
           common::errors::InvalidArgument(
-              "A node of Parameter@Grad %s does not hold variable.",
+              "A node of Parameter@GRAD %s does not hold variable.",
               p_g.second));
       pinned_var_set->insert(it->Var()->Name());
     }
     PADDLE_ENFORCE_EQ(
         IsDenseTensorType(GetTypeOfVar(vars_info, p_g.second)),
         true,
         common::errors::InvalidArgument(
-            "Parameter@Grad %s is not phi::DenseTensor.", p_g.second));
+            "Parameter@GRAD %s is not phi::DenseTensor.", p_g.second));
   }
 }
 
@@ -541,7 +541,7 @@ class CoalesceGradTensorPass : public ir::Pass {
     PADDLE_ENFORCE_EQ(next_dtype,
                       dtype,
                       common::errors::InvalidArgument(
-                          "All Parameter@Grad should have same dtype, but "
+                          "All Parameter@GRAD should have same dtype, but "
                           "there are two different type: %s, %s.",
                           DataTypeToString(next_dtype),
                           DataTypeToString(dtype)));

paddle/fluid/framework/ir/fuse_optimizer_ops_pass/fuse_optimizer_op_pass.cc

Lines changed: 1 addition & 1 deletion
@@ -513,7 +513,7 @@ void FuseOptimizerOpPass::SortParametersAndAuxVars(
       iter != grad_vec.end(),
       true,
       common::errors::NotFound(
-          "Parameter@Grad(%s) is not found in gradient vector.", p_g.second));
+          "Parameter@GRAD(%s) is not found in gradient vector.", p_g.second));
   auto idx = std::distance(grad_vec.begin(), iter);
   grad_sort_idx.emplace_back(idx);
 }
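
The @Grad → @GRAD message fixes in this file, in coalesce_grad_tensor_pass.cc, and in reducer.cc above align the error text with Paddle's actual naming scheme: gradient variables carry an @GRAD suffix appended to the parameter name (kGradVarSuffix in the framework). A tiny hypothetical helper to illustrate the convention:

```python
GRAD_SUFFIX = "@GRAD"  # Paddle's gradient-variable suffix (kGradVarSuffix)

def grad_var_name(param_name: str) -> str:
    """Gradient variable name for a parameter, e.g. 'fc_0.w_0@GRAD'."""
    return param_name + GRAD_SUFFIX

assert grad_var_name("fc_0.w_0") == "fc_0.w_0@GRAD"
```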

paddle/fluid/inference/api/analysis_predictor.cc

Lines changed: 1 addition & 1 deletion
@@ -3567,7 +3567,7 @@ USE_TRT_CONVERTER(merge_layernorm)
 USE_TRT_CONVERTER(trans_layernorm)
 USE_TRT_CONVERTER(skip_merge_layernorm)
 USE_TRT_CONVERTER(generic_plugin_creator)
-USE_TRT_CONVERTER(custom_plugin_creater)  // typos: disable-line
+USE_TRT_CONVERTER(custom_plugin_creator)
 USE_TRT_CONVERTER(custom_generic_plugin_creator)
 USE_TRT_CONVERTER(fuse_eleadd_transpose)
 USE_TRT_CONVERTER(tanh_shrink)
