Commit ef95339

resolve comments
1 parent dc985bd commit ef95339

File tree

1 file changed: +17, -17 lines changed

articles/synapse-analytics/spark/synapse-spark-sql-pool-import-export.md

Lines changed: 17 additions & 17 deletions
@@ -274,7 +274,7 @@ dfToReadFromTable.show()
 > * Table name and query cannot be specified at the same time.
 > * Only select queries are allowed. DDL and DML SQLs are not allowed.
 > * The select and filter options on dataframe are not pushed down to the SQL dedicated pool when a query is specified.
-> * Read from a query is only available in Spark 3. It is not available in Spark 2.4.
+> * Read from a query is only available in Spark 3.1 and 3.2. It is not available in Spark 2.4.

 ##### [Scala](#tab/scala2)

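The note touched by this first hunk concerns the query-based read path: a read can target either a table (a three-part name passed to synapsesql) or a select query (via Constants.QUERY or as the synapsesql argument), but not both at once. A minimal Scala sketch of the two modes, assuming the Constants import path com.microsoft.spark.sqlanalytics.utils.Constants and the placeholder names used in the article:

```scala
import com.microsoft.spark.sqlanalytics.utils.Constants
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.SqlAnalyticsConnector._

// Mode 1: read an entire table by its three-part name (no query option set).
val dfToReadFromTable: DataFrame = spark.read.
  synapsesql("<database_name>.<schema_name>.<table_name>")

// Mode 2: read from a select query instead; the table-name argument is omitted.
val dfToReadFromQuery: DataFrame = spark.read.
  option(Constants.DATABASE, "<database_name>").
  option(Constants.QUERY, "select top 10 * from <schema_name>.<table_name>").
  synapsesql()
```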
@@ -288,7 +288,7 @@ import org.apache.spark.sql.SqlAnalyticsConnector._

 // Read from a query
 // Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
-val dfToReadFromQuery1:DataFrame = spark.read.
+val dfToReadFromQueryAsOption:DataFrame = spark.read.
 // Name of the SQL Dedicated Pool or database where to run the query
 // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 option(Constants.DATABASE, "<database_name>").
@@ -301,7 +301,7 @@ val dfToReadFromQuery1:DataFrame = spark.read.
 .option(Constants.QUERY, "select <column_name>, count(*) as cnt from <schema_name>.<table_name> group by <column_name>")
 synapsesql()

-val dfToReadFromQuery2:DataFrame = spark.read.
+val dfToReadFromQueryAsArgument:DataFrame = spark.read.
 // Name of the SQL Dedicated Pool or database where to run the query
 // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 option(Constants.DATABASE, "<database_name>")
@@ -315,8 +315,8 @@ val dfToReadFromQuery2:DataFrame = spark.read.


 //Show contents of the dataframe
-dfToReadFromQuery1.show()
-dfToReadFromQuery2.show()
+dfToReadFromQueryAsOption.show()
+dfToReadFromQueryAsArgument.show()
 ```

 ##### [Python](#tab/python2)
@@ -333,7 +333,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

 # Read from a query
 # Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
-dfToReadFromQuery1 = (spark.read
+dfToReadFromQueryAsOption = (spark.read
 # Name of the SQL Dedicated Pool or database where to run the query
 # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 .option(Constants.DATABASE, "<database_name>")
@@ -347,7 +347,7 @@ dfToReadFromQuery1 = (spark.read
 .synapsesql()
 )

-dfToReadFromQuery2 = (spark.read
+dfToReadFromQueryAsArgument = (spark.read
 # Name of the SQL Dedicated Pool or database where to run the query
 # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 .option(Constants.DATABASE, "<database_name>")
@@ -361,8 +361,8 @@ dfToReadFromQuery2 = (spark.read
 )

 # Show contents of the dataframe
-dfToReadFromQuery1.show()
-dfToReadFromQuery2.show()
+dfToReadFromQueryAsOption.show()
+dfToReadFromQueryAsArgument.show()
 ```
 ---

@@ -457,7 +457,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

 // Read from a query
 // Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
-val dfToReadFromQuery1:DataFrame = spark.read.
+val dfToReadFromQueryAsOption:DataFrame = spark.read.
 //Name of the SQL Dedicated Pool or database where to run the query
 //Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 option(Constants.DATABASE, "<database_name>").
@@ -475,7 +475,7 @@ val dfToReadFromQuery1:DataFrame = spark.read.
 option(Constants.QUERY, "select <column_name>, count(*) as counts from <schema_name>.<table_name> group by <column_name>" ).
 synapsesql()

-val dfToReadFromTQuery2:DataFrame = spark.read.
+val dfToReadFromQueryAsArgument:DataFrame = spark.read.
 //Name of the SQL Dedicated Pool or database where to run the query
 //Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 option(Constants.DATABASE, "<database_name>").
@@ -494,8 +494,8 @@ val dfToReadFromTQuery2:DataFrame = spark.read.


 //Show contents of the dataframe
-dfToReadFromQuery1.show()
-dfToReadFromQuery2.show()
+dfToReadFromQueryAsOption.show()
+dfToReadFromQueryAsArgument.show()
 ```

 ##### [Python](#tab/python4)
@@ -512,7 +512,7 @@ spark.conf.set("spark.sqlanalyticsconnector.dw.database", "<database_name>")

 # Read from a query
 # Query can be provided either as an argument to synapsesql or as a Constant - Constants.QUERY
-dfToReadFromQuery1 = (spark.read
+dfToReadFromQueryAsOption = (spark.read
 # Name of the SQL Dedicated Pool or database where to run the query
 # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 .option(Constants.DATABASE, "<database_name>")
@@ -532,7 +532,7 @@ dfToReadFromQuery1 = (spark.read
 .synapsesql()
 )

-dfToReadFromQuery2 = (spark.read
+dfToReadFromQueryAsArgument = (spark.read
 # Name of the SQL Dedicated Pool or database where to run the query
 # Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database or as a Constant - Constants.DATABASE
 .option(Constants.DATABASE, "<database_name>")
@@ -551,8 +551,8 @@ dfToReadFromQuery2 = (spark.read
 )

 # Show contents of the dataframe
-dfToReadFromQuery1.show()
-dfToReadFromQuery2.show()
+dfToReadFromQueryAsOption.show()
+dfToReadFromQueryAsArgument.show()

 ```
 ---
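For readers skimming the diff, the renamed Scala read-from-query snippet assembles roughly as follows once these hunks are applied. This is a sketch, not the literal file content: the imports and the query-as-argument call are filled in from the comments and context lines above (the hunks do not show the synapsesql("<query>") line itself), and com.microsoft.spark.sqlanalytics.utils.Constants is assumed for the Constants import. The Python tabs change analogously.

```scala
import com.microsoft.spark.sqlanalytics.utils.Constants
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.SqlAnalyticsConnector._

// Query provided through the Constants.QUERY option; synapsesql() then takes no argument.
val dfToReadFromQueryAsOption: DataFrame = spark.read.
  // Database can be specified as a Spark Config - spark.sqlanalyticsconnector.dw.database - or as Constants.DATABASE
  option(Constants.DATABASE, "<database_name>").
  option(Constants.QUERY, "select <column_name>, count(*) as cnt from <schema_name>.<table_name> group by <column_name>").
  synapsesql()

// Query provided directly as the argument to synapsesql().
val dfToReadFromQueryAsArgument: DataFrame = spark.read.
  option(Constants.DATABASE, "<database_name>").
  synapsesql("select <column_name>, count(*) as cnt from <schema_name>.<table_name> group by <column_name>")

// Show contents of the dataframes
dfToReadFromQueryAsOption.show()
dfToReadFromQueryAsArgument.show()
```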
