 39  39    import io.cdap.cdap.api.data.format.StructuredRecord;
 40  40    import io.cdap.cdap.api.data.schema.Schema;
 41  41    import io.cdap.cdap.api.dataset.lib.KeyValue;
     42  + import io.cdap.cdap.api.exception.ErrorType;
 42  43    import io.cdap.cdap.api.exception.ProgramFailureException;
 43  44    import io.cdap.cdap.etl.api.Emitter;
 44  45    import io.cdap.cdap.etl.api.FailureCollector;
 51  52    import io.cdap.cdap.etl.api.engine.sql.SQLEngineInput;
 52  53    import io.cdap.cdap.etl.api.exception.ErrorContext;
 53  54    import io.cdap.cdap.etl.api.exception.ErrorDetailsProviderSpec;
 54       - import io.cdap.cdap.etl.api.exception.ErrorPhase;
 55  55    import io.cdap.cdap.etl.api.validation.ValidationFailure;
 56  56    import io.cdap.plugin.common.Asset;
 57  57    import io.cdap.plugin.common.LineageRecorder;
 62  62    import io.cdap.plugin.gcp.bigquery.util.BigQueryConstants;
 63  63    import io.cdap.plugin.gcp.bigquery.util.BigQueryUtil;
 64  64    import io.cdap.plugin.gcp.common.CmekUtils;
     65  + import io.cdap.plugin.gcp.common.GCPErrorDetailsProviderUtil;
 65  66    import io.cdap.plugin.gcp.common.GCPUtils;
 66  67    import org.apache.avro.generic.GenericData;
 67  68    import org.apache.hadoop.conf.Configuration;
@@ -156,9 +157,9 @@ public void prepareRun(BatchSourceContext context) throws Exception {
156 157         bigQuery = GCPUtils.getBigQuery(config.getProject(), credentials, null);
157 158         dataset = bigQuery.getDataset(DatasetId.of(config.getDatasetProject(), config.getDataset()));
158 159       } catch (Exception e) {
159       -     ProgramFailureException ex = new BigQueryErrorDetailsProvider().getExceptionDetails(e,
160       -       new ErrorContext(ErrorPhase.READING));
161       -     throw ex == null ? e : ex;
    160  +     throw GCPErrorDetailsProviderUtil.getHttpResponseExceptionDetailsFromChain(e,
    161  +       String.format("Unable to get BQ dataset '%s' details", config.getDataset()),
    162  +       ErrorType.UNKNOWN, false, GCPUtils.BQ_SUPPORTED_DOC_URL);
162 163       }
163 164
164 165       // Get Configuration for this run
@@ -180,16 +181,8 @@ public void prepareRun(BatchSourceContext context) throws Exception {
180 181
181 182       // Configure GCS Bucket to use
182 183       Storage storage = GCPUtils.getStorage(config.getProject(), credentials);;
183       -   String bucket = null;
184       -   try {
185       -     bucket = BigQuerySourceUtils.getOrCreateBucket(configuration, storage, bucketName, dataset,
186       -       bucketPath, cmekKeyName);
187       -   } catch (Exception e) {
188       -     String errorReason = "Failed to create bucket.";
189       -     collector.addFailure(String.format("%s %s", errorReason, e.getMessage()), null)
190       -       .withStacktrace(e.getStackTrace());
191       -     collector.getOrThrowException();
192       -   }
    184  +   String bucket = BigQuerySourceUtils.getOrCreateBucket(configuration, storage, bucketName, dataset,
    185  +     bucketPath, cmekKeyName);
193 186
194 187       // Configure Service account credentials
195 188       BigQuerySourceUtils.configureServiceAccount(configuration, config.getConnection());
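
For reference, the new catch block in the second hunk swaps the stage-scoped BigQueryErrorDetailsProvider/ErrorContext pattern for the shared GCPErrorDetailsProviderUtil helper, which, going by its name, looks for an HTTP response exception in the cause chain and rethrows the failure with those details attached. The sketch below is a minimal illustration of that call pattern, not the plugin's code: the callWithErrorDetails wrapper is hypothetical, and it assumes the helper takes the five arguments shown in the diff (throwable, message, ErrorType, a boolean flag whose meaning is not shown here, documentation URL) and returns an unchecked exception that can be rethrown directly.

// Minimal sketch; the wrapper class and method are hypothetical.
import io.cdap.cdap.api.exception.ErrorType;
import io.cdap.plugin.gcp.common.GCPErrorDetailsProviderUtil;
import io.cdap.plugin.gcp.common.GCPUtils;

import java.util.concurrent.Callable;

public class BigQueryErrorWrappingSketch {

  // Runs a BigQuery call and, if it fails, rethrows the error enriched with
  // HTTP-response details pulled from the cause chain, mirroring the catch block above.
  static <T> T callWithErrorDetails(Callable<T> call, String action) {
    try {
      return call.call();
    } catch (Exception e) {
      // Same helper and argument order as the new catch block in prepareRun;
      // ErrorType.UNKNOWN and GCPUtils.BQ_SUPPORTED_DOC_URL are taken from the diff.
      throw GCPErrorDetailsProviderUtil.getHttpResponseExceptionDetailsFromChain(
        e, String.format("Unable to %s", action), ErrorType.UNKNOWN, false,
        GCPUtils.BQ_SUPPORTED_DOC_URL);
    }
  }
}

Used that way, the dataset lookup could read dataset = callWithErrorDetails(() -> bigQuery.getDataset(DatasetId.of(config.getDatasetProject(), config.getDataset())), String.format("get BQ dataset '%s' details", config.getDataset())); the commit keeps the catch block inline instead.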