@@ -274,7 +274,7 @@ dfToReadFromTable.show()
> * Table name and query cannot be specified at the same time.
> * Only select queries are allowed. DDL and DML SQLs are not allowed.
> * The select and filter options on dataframe are not pushed down to the SQL dedicated pool when a query is specified.
- > * Read from a query is only available in Spark 3. It is not available in Spark 2.4.
+ > * Read from a query is only available in Spark 3.1 and 3.2. It is not available in Spark 2.4.

##### [Scala](#tab/scala2)
@@ -288,7 +288,7 @@ import org.apache.spark.sql.SqlAnalyticsConnector._

// Read from a query
// Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
- val dfToReadFromQuery1:DataFrame = spark.read.
+ val dfToReadFromQueryAsOption:DataFrame = spark.read.
    // Name of the SQL Dedicated Pool or database where to run the query
    // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    option(Constants.DATABASE, "<database_name>").
@@ -301,7 +301,7 @@ val dfToReadFromQuery1:DataFrame = spark.read.
    .option(Constants.QUERY, "select <column_name>, count(*) as cnt from <schema_name>.<table_name> group by <column_name>")
    synapsesql()

- val dfToReadFromQuery2:DataFrame = spark.read.
+ val dfToReadFromQueryAsArgument:DataFrame = spark.read.
    // Name of the SQL Dedicated Pool or database where to run the query
    // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    option(Constants.DATABASE, "<database_name>")
@@ -315,8 +315,8 @@ val dfToReadFromQuery2:DataFrame = spark.read.

// Show contents of the dataframe
- dfToReadFromQuery1.show()
- dfToReadFromQuery2.show()
+ dfToReadFromQueryAsOption.show()
+ dfToReadFromQueryAsArgument.show()
```

##### [Python](#tab/python2)
@@ -333,7 +333,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

# Read from a query
# Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
- dfToReadFromQuery1 = (spark.read
+ dfToReadFromQueryAsOption = (spark.read
    # Name of the SQL Dedicated Pool or database where to run the query
    # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    .option(Constants.DATABASE, "<database_name>")
@@ -347,7 +347,7 @@ dfToReadFromQuery1 = (spark.read
    .synapsesql()
)

- dfToReadFromQuery2 = (spark.read
+ dfToReadFromQueryAsArgument = (spark.read
    # Name of the SQL Dedicated Pool or database where to run the query
    # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    .option(Constants.DATABASE, "<database_name>")
@@ -361,8 +361,8 @@ dfToReadFromQuery2 = (spark.read
)

# Show contents of the dataframe
- dfToReadFromQuery1.show()
- dfToReadFromQuery2.show()
+ dfToReadFromQueryAsOption.show()
+ dfToReadFromQueryAsArgument.show()
```

---
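For readers skimming this diff without the full article, the following is a minimal consolidated sketch of the option-based pattern the hunks above rename. It assumes only what the hunks already show (the `SqlAnalyticsConnector` import, `Constants.DATABASE`, `Constants.QUERY`, and `synapsesql()`), plus a `spark` session as in a Synapse notebook; the `Constants` import path and the placeholder values are illustrative assumptions, and the server/authentication options elided from these hunks are omitted here as well.

```scala
// Hedged sketch only: consolidates the fragments shown in the hunks above.
// Assumptions: a Synapse notebook SparkSession named `spark`, and this
// Constants import path (the import is not visible in the diff).
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.SqlAnalyticsConnector._
import com.microsoft.spark.sqlanalytics.utils.Constants

// Query supplied through the Constants.QUERY option (the dfToReadFromQueryAsOption case).
val dfToReadFromQueryAsOption: DataFrame = spark.read.
  option(Constants.DATABASE, "<database_name>").
  option(Constants.QUERY, "select <column_name>, count(*) as cnt from <schema_name>.<table_name> group by <column_name>").
  synapsesql()

// Per the note above, select/filter on the DataFrame are not pushed down when a
// query is specified, so any filtering belongs in the query text itself.
dfToReadFromQueryAsOption.show()
```

The query-as-argument variant is sketched after the second tab group below.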
@@ -457,7 +457,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

// Read from a query
// Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
- val dfToReadFromQuery1:DataFrame = spark.read.
+ val dfToReadFromQueryAsOption:DataFrame = spark.read.
    // Name of the SQL Dedicated Pool or database where to run the query
    // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    option(Constants.DATABASE, "<database_name>").
@@ -475,7 +475,7 @@ val dfToReadFromQuery1:DataFrame = spark.read.
    option(Constants.QUERY, "select <column_name>, count(*) as counts from <schema_name>.<table_name> group by <column_name>").
    synapsesql()

- val dfToReadFromTQuery2:DataFrame = spark.read.
+ val dfToReadFromQueryAsArgument:DataFrame = spark.read.
    // Name of the SQL Dedicated Pool or database where to run the query
    // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    option(Constants.DATABASE, "<database_name>").
@@ -494,8 +494,8 @@ val dfToReadFromTQuery2:DataFrame = spark.read.

// Show contents of the dataframe
- dfToReadFromQuery1.show()
- dfToReadFromQuery2.show()
+ dfToReadFromQueryAsOption.show()
+ dfToReadFromQueryAsArgument.show()
```

##### [Python](#tab/python4)
@@ -512,7 +512,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

# Read from a query
# Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
- dfToReadFromQuery1 = (spark.read
+ dfToReadFromQueryAsOption = (spark.read
    # Name of the SQL Dedicated Pool or database where to run the query
    # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    .option(Constants.DATABASE, "<database_name>")
@@ -532,7 +532,7 @@ dfToReadFromQuery1 = (spark.read
    .synapsesql()
)

- dfToReadFromQuery2 = (spark.read
+ dfToReadFromQueryAsArgument = (spark.read
    # Name of the SQL Dedicated Pool or database where to run the query
    # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
    .option(Constants.DATABASE, "<database_name>")
@@ -551,8 +551,8 @@ dfToReadFromQuery2 = (spark.read
)

# Show contents of the dataframe
- dfToReadFromQuery1.show()
- dfToReadFromQuery2.show()
+ dfToReadFromQueryAsOption.show()
+ dfToReadFromQueryAsArgument.show()
```

---
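As referenced earlier, the renamed `dfToReadFromQueryAsArgument` examples are truncated in these hunks before the `synapsesql` call itself, so the argument form never appears in the diff. Going only by the in-code comment that the query "can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY", here is a hedged sketch of that form; the exact overload and argument shape are assumptions, not something this diff confirms.

```scala
// Hedged sketch only: the synapsesql(<query>) call below is inferred from the
// comment in the diff, not copied from it, and the Constants import path is an
// assumed location. A SparkSession named `spark` is assumed to exist.
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.SqlAnalyticsConnector._
import com.microsoft.spark.sqlanalytics.utils.Constants

val dfToReadFromQueryAsArgument: DataFrame = spark.read.
  option(Constants.DATABASE, "<database_name>").
  synapsesql("select <column_name>, count(*) as counts from <schema_name>.<table_name> group by <column_name>")

dfToReadFromQueryAsArgument.show()
```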