@@ -207,6 +207,9 @@ void DataSharingProcessor::collectSymbolsForPrivatization() {
207207 }
208208 }
209209
210+ // TODO For common blocks, add the underlying objects within the block. Doing
211+ // so, we won't need to explicitly handle block objects (or forget to do
212+ // so).
210213 for (auto *sym : explicitlyPrivatizedSymbols)
211214 allPrivatizedSymbols.insert (sym);
212215}
@@ -235,82 +238,85 @@ void DataSharingProcessor::insertLastPrivateCompare(mlir::Operation *op) {
235238 if (auto wrapper = mlir::dyn_cast<mlir::omp::LoopWrapperInterface>(op))
236239 loopOp = mlir::cast<mlir::omp::LoopNestOp>(wrapper.getWrappedLoop ());
237240
238- bool cmpCreated = false ;
239241 mlir::OpBuilder::InsertionGuard guard (firOpBuilder);
240- for (const omp::Clause &clause : clauses) {
241- if (clause.id != llvm::omp::OMPC_lastprivate)
242- continue ;
243- if (mlir::isa<mlir::omp::WsloopOp>(op) ||
244- mlir::isa<mlir::omp::SimdOp>(op)) {
245- // Update the original variable just before exiting the worksharing
246- // loop. Conversion as follows:
247- //
248- // omp.wsloop / omp.simd { omp.wsloop / omp.simd {
249- // omp.loop_nest { omp.loop_nest {
250- // ... ...
251- // store ===> store
252- // omp.yield %v = arith.addi %iv, %step
253- // } %cmp = %step < 0 ? %v < %ub : %v > %ub
254- // } fir.if %cmp {
255- // fir.store %v to %loopIV
256- // ^%lpv_update_blk:
257- // }
258- // omp.yield
259- // }
260- // }
261-
262- // Only generate the compare once in presence of multiple LastPrivate
263- // clauses.
264- if (cmpCreated)
265- continue ;
266- cmpCreated = true ;
267-
268- mlir::Location loc = loopOp.getLoc ();
269- mlir::Operation *lastOper = loopOp.getRegion ().back ().getTerminator ();
270- firOpBuilder.setInsertionPoint (lastOper);
271-
272- mlir::Value cmpOp;
273- llvm::SmallVector<mlir::Value> vs;
274- vs.reserve (loopOp.getIVs ().size ());
275- for (auto [iv, ub, step] :
276- llvm::zip_equal (loopOp.getIVs (), loopOp.getLoopUpperBounds (),
277- loopOp.getLoopSteps ())) {
278- // v = iv + step
279- // cmp = step < 0 ? v < ub : v > ub
280- mlir::Value v = firOpBuilder.create <mlir::arith::AddIOp>(loc, iv, step);
281- vs.push_back (v);
282- mlir::Value zero =
283- firOpBuilder.createIntegerConstant (loc, step.getType (), 0 );
284- mlir::Value negativeStep = firOpBuilder.create <mlir::arith::CmpIOp>(
285- loc, mlir::arith::CmpIPredicate::slt, step, zero);
286- mlir::Value vLT = firOpBuilder.create <mlir::arith::CmpIOp>(
287- loc, mlir::arith::CmpIPredicate::slt, v, ub);
288- mlir::Value vGT = firOpBuilder.create <mlir::arith::CmpIOp>(
289- loc, mlir::arith::CmpIPredicate::sgt, v, ub);
290- mlir::Value icmpOp = firOpBuilder.create <mlir::arith::SelectOp>(
291- loc, negativeStep, vLT, vGT);
292-
293- if (cmpOp) {
294- cmpOp = firOpBuilder.create <mlir::arith::AndIOp>(loc, cmpOp, icmpOp);
295- } else {
296- cmpOp = icmpOp;
297- }
298- }
242+ bool hasLastPrivate = [&]() {
243+ for (const semantics::Symbol *sym : allPrivatizedSymbols) {
244+ if (const auto *commonDet =
245+ sym->detailsIf <semantics::CommonBlockDetails>()) {
246+ for (const auto &mem : commonDet->objects ())
247+ if (mem->test (semantics::Symbol::Flag::OmpLastPrivate))
248+ return true ;
249+ } else if (sym->test (semantics::Symbol::Flag::OmpLastPrivate))
250+ return true ;
251+ }
299252
300- auto ifOp = firOpBuilder.create <fir::IfOp>(loc, cmpOp, /* else*/ false );
301- firOpBuilder.setInsertionPointToStart (&ifOp.getThenRegion ().front ());
302- for (auto [v, loopIV] : llvm::zip_equal (vs, loopIVs)) {
303- assert (loopIV && " loopIV was not set" );
304- firOpBuilder.createStoreWithConvert (loc, v, loopIV);
305- }
306- lastPrivIP = firOpBuilder.saveInsertionPoint ();
307- } else if (mlir::isa<mlir::omp::SectionsOp>(op)) {
308- // Already handled by genOMP()
309- } else {
310- TODO (converter.getCurrentLocation (),
311- " lastprivate clause in constructs other than "
312- " simd/worksharing-loop" );
253+ return false ;
254+ }();
255+
256+ if (!hasLastPrivate)
257+ return ;
258+
259+ if (mlir::isa<mlir::omp::WsloopOp>(op) || mlir::isa<mlir::omp::SimdOp>(op)) {
260+ // Update the original variable just before exiting the worksharing
261+ // loop. Conversion as follows:
262+ //
263+ // omp.wsloop / omp.simd { omp.wsloop / omp.simd {
264+ // omp.loop_nest { omp.loop_nest {
265+ // ... ...
266+ // store ===> store
267+ // omp.yield %v = arith.addi %iv, %step
268+ // } %cmp = %step < 0 ? %v < %ub : %v > %ub
269+ // } fir.if %cmp {
270+ // fir.store %v to %loopIV
271+ // ^%lpv_update_blk:
272+ // }
273+ // omp.yield
274+ // }
275+ // }
276+ mlir::Location loc = loopOp.getLoc ();
277+ mlir::Operation *lastOper = loopOp.getRegion ().back ().getTerminator ();
278+ firOpBuilder.setInsertionPoint (lastOper);
279+
280+ mlir::Value cmpOp;
281+ llvm::SmallVector<mlir::Value> vs;
282+ vs.reserve (loopOp.getIVs ().size ());
283+ for (auto [iv, ub, step] :
284+ llvm::zip_equal (loopOp.getIVs (), loopOp.getLoopUpperBounds (),
285+ loopOp.getLoopSteps ())) {
286+ // v = iv + step
287+ // cmp = step < 0 ? v < ub : v > ub
288+ mlir::Value v = firOpBuilder.create <mlir::arith::AddIOp>(loc, iv, step);
289+ vs.push_back (v);
290+ mlir::Value zero =
291+ firOpBuilder.createIntegerConstant (loc, step.getType (), 0 );
292+ mlir::Value negativeStep = firOpBuilder.create <mlir::arith::CmpIOp>(
293+ loc, mlir::arith::CmpIPredicate::slt, step, zero);
294+ mlir::Value vLT = firOpBuilder.create <mlir::arith::CmpIOp>(
295+ loc, mlir::arith::CmpIPredicate::slt, v, ub);
296+ mlir::Value vGT = firOpBuilder.create <mlir::arith::CmpIOp>(
297+ loc, mlir::arith::CmpIPredicate::sgt, v, ub);
298+ mlir::Value icmpOp = firOpBuilder.create <mlir::arith::SelectOp>(
299+ loc, negativeStep, vLT, vGT);
300+
301+ if (cmpOp)
302+ cmpOp = firOpBuilder.create <mlir::arith::AndIOp>(loc, cmpOp, icmpOp);
303+ else
304+ cmpOp = icmpOp;
313305 }
306+
307+ auto ifOp = firOpBuilder.create <fir::IfOp>(loc, cmpOp, /* else*/ false );
308+ firOpBuilder.setInsertionPointToStart (&ifOp.getThenRegion ().front ());
309+ for (auto [v, loopIV] : llvm::zip_equal (vs, loopIVs)) {
310+ assert (loopIV && " loopIV was not set" );
311+ firOpBuilder.createStoreWithConvert (loc, v, loopIV);
312+ }
313+ lastPrivIP = firOpBuilder.saveInsertionPoint ();
314+ } else if (mlir::isa<mlir::omp::SectionsOp>(op)) {
315+ // Already handled by genOMP()
316+ } else {
317+ TODO (converter.getCurrentLocation (),
318+ " lastprivate clause in constructs other than "
319+ " simd/worksharing-loop" );
314320 }
315321}
316322
0 commit comments