@@ -69,6 +69,151 @@ mlir::LogicalResult CIRGenFunction::emitCXXTryStmt(const CXXTryStmt &s) {
   if (s.getTryBlock()->body_empty())
     return mlir::LogicalResult::success();

-  cgm.errorNYI("exitCXXTryStmt: CXXTryStmt with non-empty body");
-  return mlir::LogicalResult::success();
+  mlir::Location loc = getLoc(s.getSourceRange());
+  // Create a scope to hold try local storage for catch params.
+
+  mlir::OpBuilder::InsertPoint scopeIP;
+  cir::ScopeOp::create(
+      builder, loc,
+      /*scopeBuilder=*/[&](mlir::OpBuilder &b, mlir::Location loc) {
+        scopeIP = builder.saveInsertionPoint();
+      });
+
+  mlir::OpBuilder::InsertionGuard guard(builder);
+  builder.restoreInsertionPoint(scopeIP);
+  mlir::LogicalResult result = emitCXXTryStmtUnderScope(s);
+  cir::YieldOp::create(builder, loc);
+  return result;
+}
+
+mlir::LogicalResult
+CIRGenFunction::emitCXXTryStmtUnderScope(const CXXTryStmt &s) {
+  const llvm::Triple &t = getTarget().getTriple();
+  // If we encounter a try statement in an OpenMP target region offloaded to
+  // a GPU, we treat it as a basic block.
+  const bool isTargetDevice =
+      (cgm.getLangOpts().OpenMPIsTargetDevice && (t.isNVPTX() || t.isAMDGCN()));
+  if (isTargetDevice) {
+    cgm.errorNYI(
+        "emitCXXTryStmtUnderScope: OpenMP target region offloaded to GPU");
+    return mlir::success();
+  }
+
+  unsigned numHandlers = s.getNumHandlers();
+  mlir::Location tryLoc = getLoc(s.getBeginLoc());
+  mlir::OpBuilder::InsertPoint beginInsertTryBody;
+
+  bool hasCatchAll = false;
+  for (unsigned i = 0; i != numHandlers; ++i) {
+    hasCatchAll |= s.getHandler(i)->getExceptionDecl() == nullptr;
+    if (hasCatchAll)
+      break;
+  }
+
+  // Create the scope to represent only the C/C++ `try {}` part, but don't
+  // populate it right away. Reserve some space to store the exception info,
+  // but don't emit the bulk yet; for now only make sure the scope returns
+  // the exception information.
+  auto tryOp = cir::TryOp::create(
+      builder, tryLoc,
+      /*tryBuilder=*/
+      [&](mlir::OpBuilder &b, mlir::Location loc) {
+        beginInsertTryBody = builder.saveInsertionPoint();
+      },
+      /*handlersBuilder=*/
+      [&](mlir::OpBuilder &b, mlir::Location loc,
+          mlir::OperationState &result) {
+        mlir::OpBuilder::InsertionGuard guard(b);
+
+        // We create an extra region for an unwind catch handler in case the
+        // catch-all handler doesn't exist.
+        unsigned numRegionsToCreate =
+            hasCatchAll ? numHandlers : numHandlers + 1;
+
+        for (unsigned i = 0; i != numRegionsToCreate; ++i)
+          builder.createBlock(result.addRegion());
+      });
+
+  // Finally emit the body for try/catch.
+  {
+    mlir::Location loc = tryOp.getLoc();
+    mlir::OpBuilder::InsertionGuard guard(builder);
+    builder.restoreInsertionPoint(beginInsertTryBody);
+    CIRGenFunction::LexicalScope tryScope{*this, loc,
+                                          builder.getInsertionBlock()};
+
+    tryScope.setAsTry(tryOp);
+
+    // Attach the basic blocks for the catch regions.
+    enterCXXTryStmt(s, tryOp);
+
+    // Emit the body for the `try {}` part.
+    {
+      mlir::OpBuilder::InsertionGuard guard(builder);
+      CIRGenFunction::LexicalScope tryBodyScope{*this, loc,
+                                                builder.getInsertionBlock()};
+      if (emitStmt(s.getTryBlock(), /*useCurrentScope=*/true).failed())
+        return mlir::failure();
+    }
+
+    // Emit catch clauses.
+    exitCXXTryStmt(s);
+  }
+
+  return mlir::success();
+}
+
+void CIRGenFunction::enterCXXTryStmt(const CXXTryStmt &s, cir::TryOp tryOp,
+                                     bool isFnTryBlock) {
+  unsigned numHandlers = s.getNumHandlers();
+  EHCatchScope *catchScope = ehStack.pushCatch(numHandlers);
+  for (unsigned i = 0; i != numHandlers; ++i) {
+    const CXXCatchStmt *catchStmt = s.getHandler(i);
+    if (catchStmt->getExceptionDecl()) {
+      cgm.errorNYI("enterCXXTryStmt: CatchStmt with ExceptionDecl");
+      return;
+    }
+
+    // No exception decl indicates '...', a catch-all.
+    mlir::Block *handler = &tryOp.getHandlerRegions()[i].getBlocks().front();
+    catchScope->setHandler(i, cgm.getCXXABI().getCatchAllTypeInfo(), handler);
+
+    // Under async exceptions, catch(...) needs to catch HW exceptions too.
+    // Mark the scope with SehTryBegin as a SEH __try scope.
+    if (getLangOpts().EHAsynch) {
+      cgm.errorNYI("enterCXXTryStmt: EHAsynch");
+      return;
+    }
+  }
+}
+
+void CIRGenFunction::exitCXXTryStmt(const CXXTryStmt &s, bool isFnTryBlock) {
+  unsigned numHandlers = s.getNumHandlers();
+  EHCatchScope &catchScope = cast<EHCatchScope>(*ehStack.begin());
+  assert(catchScope.getNumHandlers() == numHandlers);
+  cir::TryOp tryOp = curLexScope->getTry();
+
+  // If the catch was not required, bail out now.
+  if (!catchScope.hasEHBranches()) {
+    catchScope.clearHandlerBlocks();
+    ehStack.popCatch();
+
+    // Drop all basic blocks from all catch regions.
+    SmallVector<mlir::Block *> eraseBlocks;
+    for (mlir::Region &handlerRegion : tryOp.getHandlerRegions()) {
+      if (handlerRegion.empty())
+        continue;
+
+      for (mlir::Block &b : handlerRegion.getBlocks())
+        eraseBlocks.push_back(&b);
+    }
+
+    for (mlir::Block *b : eraseBlocks)
+      b->erase();
+
+    tryOp.setHandlerTypesAttr({});
+    return;
+  }
+
+  cgm.errorNYI("exitCXXTryStmt: Required catch");
 }
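
For reference, a minimal sketch of the only handler shape this patch fully wires up: a try block whose handlers are all catch-all clauses with no exception declaration. A handler that declares a variable, an OpenMP target-device region, or -EHa async exceptions still hit errorNYI in the code above, and exitCXXTryStmt currently only handles the case where no EH branches were recorded for the catch scope. The names mayThrow and useTry below are invented for illustration and are not part of the patch or its tests.

    // Hypothetical input exercising the new catch-all path.
    void mayThrow();

    void useTry() {
      try {
        mayThrow();   // try body emitted via emitStmt under tryBodyScope
      } catch (...) { // no exception decl: registered with
                      // getCatchAllTypeInfo() in enterCXXTryStmt
      }
    }

In terms of the code above, cir::ScopeOp provides storage for catch parameters, and cir::TryOp gets one handler region per catch clause plus an extra unwind region when no catch-all handler is present.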