Commit a90c6fd

Merge pull request #26 from mlee3gsd/master
Update Spark to DW Connector
2 parents: 8a52a78 + 13f45b5

File tree

2 files changed: 22 additions, 30 deletions


Notebooks/PySpark/03 Read and write from SQL pool table.ipynb

Lines changed: 11 additions & 15 deletions
@@ -1,12 +1,4 @@
 {
-"metadata": {
-"saveOutput": true,
-"language_info": {
-"name": "scala"
-}
-},
-"nbformat": 4,
-"nbformat_minor": 2,
 "cells": [
 {
 "cell_type": "markdown",
@@ -126,12 +118,9 @@
 "import com.microsoft.spark.sqlanalytics.utils.Constants\n",
 "\n",
 "val sql_pool_name = \"Your sql pool name\" //fill in your sql pool name\n",
-"val account_name = \"Your storage account name\" //fill in your storage account name\n",
-"val temp_folder = \"Your temp folder\" //fill in a container name under your storage account\n",
 "\n",
-"holiday_nodate.write.option(Constants.TEMP_FOLDER, s\"abfss://$temp_folder@$account_name.dfs.core.windows.net/\")\n",
-" .sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\", Constants.INTERNAL)\n",
-""
+"holiday_nodate.write\n",
+" .sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\", Constants.INTERNAL)\n"
 ],
 "attachments": {}
 },
@@ -170,11 +159,18 @@
 "source": [
 "// Read the table we just created in the sql pool as a Spark dataframe\n",
 "val spark_read = spark.read.\n",
-" option(Constants.TEMP_FOLDER, s\"abfss://$temp_folder@$account_name.dfs.core.windows.net/\").\n",
 " sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\")\n",
 "spark_read.show(5, truncate = false)"
 ],
 "attachments": {}
 }
-]
+],
+"metadata": {
+"saveOutput": true,
+"language_info": {
+"name": "scala"
+}
+},
+"nbformat": 4,
+"nbformat_minor": 2
 }
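
For reference, after this change the write path in the notebook reduces to the call below. This is a minimal Scala sketch rather than a verbatim excerpt: it assumes a Synapse Spark notebook where holiday_nodate is the DataFrame prepared in an earlier cell, and where the connector implicits (import org.apache.spark.sql.SqlAnalyticsConnector._) are in scope even though only the Constants import appears in this diff hunk; the storage account and temp-folder variables removed by the commit are no longer needed.

// Assumed imports for the Synapse Spark-to-SQL-pool connector; the diff shows only
// the Constants import, the implicit conversions are assumed to be imported elsewhere.
import com.microsoft.spark.sqlanalytics.utils.Constants
import org.apache.spark.sql.SqlAnalyticsConnector._

val sql_pool_name = "Your sql pool name" // fill in your sql pool name

// Write the DataFrame to an internal (managed) table in the dedicated SQL pool;
// no Constants.TEMP_FOLDER option is passed any more.
holiday_nodate.write
  .sqlanalytics(s"$sql_pool_name.dbo.PublicHoliday", Constants.INTERNAL)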

Notebooks/Scala/03 Read and write from SQL pool table.ipynb

Lines changed: 11 additions & 15 deletions
@@ -1,12 +1,4 @@
 {
-"metadata": {
-"saveOutput": true,
-"language_info": {
-"name": "scala"
-}
-},
-"nbformat": 4,
-"nbformat_minor": 2,
 "cells": [
 {
 "cell_type": "markdown",
@@ -123,12 +115,9 @@
 "import com.microsoft.spark.sqlanalytics.utils.Constants\n",
 "\n",
 "val sql_pool_name = \"Your sql pool name\" //fill in your sql pool name\n",
-"val account_name = \"Your storage account name\" //fill in your storage account name\n",
-"val temp_folder = \"Your temp folder\" //fill in a container name under your storage account\n",
 "\n",
-"holiday_nodate.write.option(Constants.TEMP_FOLDER, s\"abfss://$temp_folder@$account_name.dfs.core.windows.net/\")\n",
-" .sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\", Constants.INTERNAL)\n",
-""
+"holiday_nodate.write\n",
+" .sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\", Constants.INTERNAL)\n"
 ],
 "attachments": {}
 },
@@ -167,11 +156,18 @@
 "source": [
 "// Read the table we just created in the sql pool as a Spark dataframe\n",
 "val spark_read = spark.read.\n",
-" option(Constants.TEMP_FOLDER, s\"abfss://$temp_folder@$account_name.dfs.core.windows.net/\").\n",
 " sqlanalytics(s\"$sql_pool_name.dbo.PublicHoliday\")\n",
 "spark_read.show(5, truncate = false)"
 ],
 "attachments": {}
 }
-]
+],
+"metadata": {
+"saveOutput": true,
+"language_info": {
+"name": "scala"
+}
+},
+"nbformat": 4,
+"nbformat_minor": 2
 }
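
The read path simplifies the same way in both notebooks. A minimal Scala sketch under the same assumptions as above (connector implicits imported, spark is the notebook's active SparkSession, sql_pool_name defined as in the write cell, and the table written by the previous cell):

// Read the table back from the dedicated SQL pool as a Spark DataFrame;
// the Constants.TEMP_FOLDER option dropped by this commit is not required.
val spark_read = spark.read
  .sqlanalytics(s"$sql_pool_name.dbo.PublicHoliday")
spark_read.show(5, truncate = false)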

0 commit comments
