@@ -212,6 +212,9 @@ void DataSharingProcessor::collectSymbolsForPrivatization() {
     }
   }

+  // TODO For common blocks, add the underlying objects within the block. Doing
+  // so, we won't need to explicitly handle block objects (or forget to do
+  // so).
   for (auto *sym : explicitlyPrivatizedSymbols)
     allPrivatizedSymbols.insert(sym);
 }
@@ -240,82 +243,85 @@ void DataSharingProcessor::insertLastPrivateCompare(mlir::Operation *op) {
   if (auto wrapper = mlir::dyn_cast<mlir::omp::LoopWrapperInterface>(op))
     loopOp = mlir::cast<mlir::omp::LoopNestOp>(wrapper.getWrappedLoop());

-  bool cmpCreated = false;
   mlir::OpBuilder::InsertionGuard guard(firOpBuilder);
-  for (const omp::Clause &clause : clauses) {
-    if (clause.id != llvm::omp::OMPC_lastprivate)
-      continue;
-    if (mlir::isa<mlir::omp::WsloopOp>(op) ||
-        mlir::isa<mlir::omp::SimdOp>(op)) {
-      // Update the original variable just before exiting the worksharing
-      // loop. Conversion as follows:
-      //
-      // omp.wsloop / omp.simd {      omp.wsloop / omp.simd {
-      //   omp.loop_nest {              omp.loop_nest {
-      //     ...                          ...
-      //     store          ===>          store
-      //     omp.yield                    %v = arith.addi %iv, %step
-      //   }                              %cmp = %step < 0 ? %v < %ub : %v > %ub
-      // }                                fir.if %cmp {
-      //                                    fir.store %v to %loopIV
-      //                                    ^%lpv_update_blk:
-      //                                  }
-      //                                  omp.yield
-      //                                }
-      //                              }
-
-      // Only generate the compare once in presence of multiple LastPrivate
-      // clauses.
-      if (cmpCreated)
-        continue;
-      cmpCreated = true;
-
-      mlir::Location loc = loopOp.getLoc();
-      mlir::Operation *lastOper = loopOp.getRegion().back().getTerminator();
-      firOpBuilder.setInsertionPoint(lastOper);
-
-      mlir::Value cmpOp;
-      llvm::SmallVector<mlir::Value> vs;
-      vs.reserve(loopOp.getIVs().size());
-      for (auto [iv, ub, step] :
-           llvm::zip_equal(loopOp.getIVs(), loopOp.getLoopUpperBounds(),
-                           loopOp.getLoopSteps())) {
-        // v = iv + step
-        // cmp = step < 0 ? v < ub : v > ub
-        mlir::Value v = firOpBuilder.create<mlir::arith::AddIOp>(loc, iv, step);
-        vs.push_back(v);
-        mlir::Value zero =
-            firOpBuilder.createIntegerConstant(loc, step.getType(), 0);
-        mlir::Value negativeStep = firOpBuilder.create<mlir::arith::CmpIOp>(
-            loc, mlir::arith::CmpIPredicate::slt, step, zero);
-        mlir::Value vLT = firOpBuilder.create<mlir::arith::CmpIOp>(
-            loc, mlir::arith::CmpIPredicate::slt, v, ub);
-        mlir::Value vGT = firOpBuilder.create<mlir::arith::CmpIOp>(
-            loc, mlir::arith::CmpIPredicate::sgt, v, ub);
-        mlir::Value icmpOp = firOpBuilder.create<mlir::arith::SelectOp>(
-            loc, negativeStep, vLT, vGT);
-
-        if (cmpOp) {
-          cmpOp = firOpBuilder.create<mlir::arith::AndIOp>(loc, cmpOp, icmpOp);
-        } else {
-          cmpOp = icmpOp;
-        }
-      }
+  bool hasLastPrivate = [&]() {
+    for (const semantics::Symbol *sym : allPrivatizedSymbols) {
+      if (const auto *commonDet =
+              sym->detailsIf<semantics::CommonBlockDetails>()) {
+        for (const auto &mem : commonDet->objects())
+          if (mem->test(semantics::Symbol::Flag::OmpLastPrivate))
+            return true;
+      } else if (sym->test(semantics::Symbol::Flag::OmpLastPrivate))
+        return true;
+    }

-      auto ifOp = firOpBuilder.create<fir::IfOp>(loc, cmpOp, /*else*/ false);
-      firOpBuilder.setInsertionPointToStart(&ifOp.getThenRegion().front());
-      for (auto [v, loopIV] : llvm::zip_equal(vs, loopIVs)) {
-        assert(loopIV && "loopIV was not set");
-        firOpBuilder.createStoreWithConvert(loc, v, loopIV);
-      }
-      lastPrivIP = firOpBuilder.saveInsertionPoint();
-    } else if (mlir::isa<mlir::omp::SectionsOp>(op)) {
-      // Already handled by genOMP()
-    } else {
-      TODO(converter.getCurrentLocation(),
-           "lastprivate clause in constructs other than "
-           "simd/worksharing-loop");
+    return false;
+  }();
+
+  if (!hasLastPrivate)
+    return;
+
+  if (mlir::isa<mlir::omp::WsloopOp>(op) || mlir::isa<mlir::omp::SimdOp>(op)) {
+    // Update the original variable just before exiting the worksharing
+    // loop. Conversion as follows:
+    //
+    // omp.wsloop / omp.simd {      omp.wsloop / omp.simd {
+    //   omp.loop_nest {              omp.loop_nest {
+    //     ...                          ...
+    //     store          ===>          store
+    //     omp.yield                    %v = arith.addi %iv, %step
+    //   }                              %cmp = %step < 0 ? %v < %ub : %v > %ub
+    // }                                fir.if %cmp {
+    //                                    fir.store %v to %loopIV
+    //                                    ^%lpv_update_blk:
+    //                                  }
+    //                                  omp.yield
+    //                                }
+    //                              }
+    mlir::Location loc = loopOp.getLoc();
+    mlir::Operation *lastOper = loopOp.getRegion().back().getTerminator();
+    firOpBuilder.setInsertionPoint(lastOper);
+
+    mlir::Value cmpOp;
+    llvm::SmallVector<mlir::Value> vs;
+    vs.reserve(loopOp.getIVs().size());
+    for (auto [iv, ub, step] :
+         llvm::zip_equal(loopOp.getIVs(), loopOp.getLoopUpperBounds(),
+                         loopOp.getLoopSteps())) {
+      // v = iv + step
+      // cmp = step < 0 ? v < ub : v > ub
+      mlir::Value v = firOpBuilder.create<mlir::arith::AddIOp>(loc, iv, step);
+      vs.push_back(v);
+      mlir::Value zero =
+          firOpBuilder.createIntegerConstant(loc, step.getType(), 0);
+      mlir::Value negativeStep = firOpBuilder.create<mlir::arith::CmpIOp>(
+          loc, mlir::arith::CmpIPredicate::slt, step, zero);
+      mlir::Value vLT = firOpBuilder.create<mlir::arith::CmpIOp>(
+          loc, mlir::arith::CmpIPredicate::slt, v, ub);
+      mlir::Value vGT = firOpBuilder.create<mlir::arith::CmpIOp>(
+          loc, mlir::arith::CmpIPredicate::sgt, v, ub);
+      mlir::Value icmpOp = firOpBuilder.create<mlir::arith::SelectOp>(
+          loc, negativeStep, vLT, vGT);
+
+      if (cmpOp)
+        cmpOp = firOpBuilder.create<mlir::arith::AndIOp>(loc, cmpOp, icmpOp);
+      else
+        cmpOp = icmpOp;
     }
+
+    auto ifOp = firOpBuilder.create<fir::IfOp>(loc, cmpOp, /*else*/ false);
+    firOpBuilder.setInsertionPointToStart(&ifOp.getThenRegion().front());
+    for (auto [v, loopIV] : llvm::zip_equal(vs, loopIVs)) {
+      assert(loopIV && "loopIV was not set");
+      firOpBuilder.createStoreWithConvert(loc, v, loopIV);
+    }
+    lastPrivIP = firOpBuilder.saveInsertionPoint();
+  } else if (mlir::isa<mlir::omp::SectionsOp>(op)) {
+    // Already handled by genOMP()
+  } else {
+    TODO(converter.getCurrentLocation(),
+         "lastprivate clause in constructs other than "
+         "simd/worksharing-loop");
   }
 }

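For context, a minimal Fortran sketch (subroutine and common block names are hypothetical) of the situation the new hasLastPrivate walk is meant to catch: a lastprivate list item that is a common block, where the OmpLastPrivate flag sits on the block's member objects rather than on an ordinary symbol.

    subroutine demo()
      integer :: i, x, y
      common /blk/ x, y
      ! lastprivate names the common block; semantics marks x and y with
      ! OmpLastPrivate, which is what the CommonBlockDetails branch inspects.
      !$omp parallel do lastprivate(/blk/)
      do i = 1, 10
        x = i
        y = 2 * i
      end do
      !$omp end parallel do
    end subroutine demo

With an input of this shape, the worksharing-loop branch above emits the last-iteration compare and the fir.if region in which the privatized copies are stored back to the original variables.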