
Commit e90c470

Merge pull request #90956 from djpmsft/docUpdates
Switch Activity Documentation
2 parents 042e7eb + 74c9374 commit e90c470

File tree

2 files changed: +339 -0 lines changed


articles/data-factory/TOC.yml

Lines changed: 2 additions & 0 deletions

```diff
@@ -432,6 +432,8 @@
       href: control-flow-get-metadata-activity.md
     - name: If Condition Activity
       href: control-flow-if-condition-activity.md
+    - name: Switch Activity
+      href: control-flow-switch-activity.md
     - name: Lookup Activity
       href: control-flow-lookup-activity.md
     - name: Set Variable Activity
```
articles/data-factory/control-flow-switch-activity.md

Lines changed: 337 additions & 0 deletions
---
title: Switch activity in Azure Data Factory | Microsoft Docs
description: The Switch activity allows you to control the processing flow based on a condition.
services: data-factory
author: djpmsft
ms.author: daperlov
ms.reviewer: maghan
ms.service: data-factory
ms.workload: data-services
ms.topic: conceptual
ms.date: 10/08/2019
---

# Switch activity in Azure Data Factory

The Switch activity provides the same functionality that a switch statement provides in programming languages. It evaluates a set of activities corresponding to the case that matches the condition evaluation.

## Syntax

```json
{
    "name": "<Name of the activity>",
    "type": "Switch",
    "typeProperties": {
        "expression": {
            "value": "<expression that evaluates to some string value>",
            "type": "Expression"
        },
        "cases": [
            {
                "value": "<string value that matches expression evaluation>",
                "activities": [
                    {
                        "<Activity 1 definition>"
                    },
                    {
                        "<Activity 2 definition>"
                    },
                    {
                        "<Activity N definition>"
                    }
                ]
            }
        ],
        "defaultActivities": [
            {
                "<Activity 1 definition>"
            },
            {
                "<Activity 2 definition>"
            },
            {
                "<Activity N definition>"
            }
        ]
    }
}
```

## Type properties

Property | Description | Allowed values | Required
-------- | ----------- | -------------- | --------
name | Name of the Switch activity. | String | Yes
type | Must be set to **Switch**. | String | Yes
expression | Expression that must evaluate to a string value. | Expression with result type string | Yes
cases | Set of cases that contain a value and a set of activities to execute when the value matches the expression evaluation. You must provide at least one case. There's a maximum of 25 cases. | Array of Case Objects | Yes
defaultActivities | Set of activities that are executed when no case value matches the expression evaluation. | Array of Activities | Yes

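Because the expression must return a string, values of other types need to be converted before they're compared against the case values. The following minimal sketch is not part of the sample later in this article; it assumes a hypothetical integer pipeline parameter named **routeNumber** and uses the `string()` expression function to coerce it to a string. If the result matches no case value, only the activities in `defaultActivities` run.

```json
{
    "name": "RouteSwitch",
    "type": "Switch",
    "typeProperties": {
        "expression": {
            "value": "@string(pipeline().parameters.routeNumber)",
            "type": "Expression"
        },
        "cases": [
            {
                "value": "1",
                "activities": [
                    {
                        "<Activity definition to run when routeNumber is 1>"
                    }
                ]
            }
        ],
        "defaultActivities": [
            {
                "<Activity definition to run for any other value>"
            }
        ]
    }
}
```
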
## Example

The pipeline in this example copies data from an input folder to an output folder. The output folder is determined by the value of the pipeline parameter **routeSelection**.

> [!NOTE]
> This section provides JSON definitions and sample PowerShell commands to run the pipeline. For a walkthrough with step-by-step instructions to create a Data Factory pipeline by using Azure PowerShell and JSON definitions, see [tutorial: create a data factory by using Azure PowerShell](quickstart-create-data-factory-powershell.md).

### Pipeline with Switch activity (Adfv2QuickStartPipeline.json)

```json
{
    "name": "Adfv2QuickStartPipeline",
    "properties": {
        "activities": [
            {
                "name": "MySwitch",
                "type": "Switch",
                "typeProperties": {
                    "expression": {
                        "value": "@pipeline().parameters.routeSelection",
                        "type": "Expression"
                    },
                    "cases": [
                        {
                            "value": "1",
                            "activities": [
                                {
                                    "name": "CopyFromBlobToBlob1",
                                    "type": "Copy",
                                    "inputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.inputPath"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "outputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.outputPath1"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "typeProperties": {
                                        "source": {
                                            "type": "BlobSource"
                                        },
                                        "sink": {
                                            "type": "BlobSink"
                                        }
                                    }
                                }
                            ]
                        },
                        {
                            "value": "2",
                            "activities": [
                                {
                                    "name": "CopyFromBlobToBlob2",
                                    "type": "Copy",
                                    "inputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.inputPath"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "outputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.outputPath2"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "typeProperties": {
                                        "source": {
                                            "type": "BlobSource"
                                        },
                                        "sink": {
                                            "type": "BlobSink"
                                        }
                                    }
                                }
                            ]
                        },
                        {
                            "value": "3",
                            "activities": [
                                {
                                    "name": "CopyFromBlobToBlob3",
                                    "type": "Copy",
                                    "inputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.inputPath"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "outputs": [
                                        {
                                            "referenceName": "BlobDataset",
                                            "parameters": {
                                                "path": "@pipeline().parameters.outputPath3"
                                            },
                                            "type": "DatasetReference"
                                        }
                                    ],
                                    "typeProperties": {
                                        "source": {
                                            "type": "BlobSource"
                                        },
                                        "sink": {
                                            "type": "BlobSink"
                                        }
                                    }
                                }
                            ]
                        }
                    ],
                    "defaultActivities": []
                }
            }
        ],
        "parameters": {
            "inputPath": {
                "type": "String"
            },
            "outputPath1": {
                "type": "String"
            },
            "outputPath2": {
                "type": "String"
            },
            "outputPath3": {
                "type": "String"
            },
            "routeSelection": {
                "type": "String"
            }
        }
    }
}
```
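
In this pipeline, `defaultActivities` is an empty array, so a **routeSelection** value with no matching case runs nothing. If you want the default branch to do something, list activities there just as you would inside a case. The following fragment is a minimal sketch, not part of the sample; the Wait activity name `NoRouteMatched` is illustrative.

```json
"defaultActivities": [
    {
        "name": "NoRouteMatched",
        "type": "Wait",
        "typeProperties": {
            "waitTimeInSeconds": 1
        }
    }
]
```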
### Azure Storage linked service (AzureStorageLinkedService.json)

```json
{
    "name": "AzureStorageLinkedService",
    "properties": {
        "type": "AzureStorage",
        "typeProperties": {
            "connectionString": {
                "value": "DefaultEndpointsProtocol=https;AccountName=<Azure Storage account name>;AccountKey=<Azure Storage account key>",
                "type": "SecureString"
            }
        }
    }
}
```

### Parameterized Azure Blob dataset (BlobDataset.json)

The pipeline sets the **folderPath** to the value of the **outputPath1**, **outputPath2**, or **outputPath3** parameter of the pipeline, depending on which case runs.

```json
{
    "name": "BlobDataset",
    "properties": {
        "type": "AzureBlob",
        "typeProperties": {
            "folderPath": {
                "value": "@{dataset().path}",
                "type": "Expression"
            }
        },
        "linkedServiceName": {
            "referenceName": "AzureStorageLinkedService",
            "type": "LinkedServiceReference"
        },
        "parameters": {
            "path": {
                "type": "String"
            }
        }
    }
}
```

### Pipeline parameter JSON (PipelineParameters.json)

```json
{
    "inputPath": "adftutorial/input",
    "outputPath1": "adftutorial/outputCase1",
    "outputPath2": "adftutorial/outputCase2",
    "outputPath3": "adftutorial/outputCase3",
    "routeSelection": "1"
}
```
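
With `"routeSelection": "1"`, the Switch activity runs **CopyFromBlobToBlob1** and the data lands in **adftutorial/outputCase1**. To exercise another branch, only this parameter file needs to change; the sketch below keeps the same values but sets **routeSelection** to route the copy to **outputCase2**. A value with no matching case, such as `"4"`, falls through to `defaultActivities`, which is empty in this sample, so no copy runs.

```json
{
    "inputPath": "adftutorial/input",
    "outputPath1": "adftutorial/outputCase1",
    "outputPath2": "adftutorial/outputCase2",
    "outputPath3": "adftutorial/outputCase3",
    "routeSelection": "2"
}
```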
### PowerShell commands

[!INCLUDE [updated-for-az](../../includes/updated-for-az.md)]

These commands assume that you've saved the JSON files into the folder: C:\ADF.

```powershell
# Sign in and select the subscription to use.
Connect-AzAccount
Select-AzSubscription "<Your subscription name>"

$resourceGroupName = "<Resource Group Name>"
$dataFactoryName = "<Data Factory Name. Must be globally unique>"
Remove-AzDataFactoryV2 -Name $dataFactoryName -ResourceGroupName $resourceGroupName -Force

# Create the data factory and deploy the linked service, dataset, and pipeline definitions.
Set-AzDataFactoryV2 -ResourceGroupName $resourceGroupName -Location "East US" -Name $dataFactoryName
Set-AzDataFactoryV2LinkedService -DataFactoryName $dataFactoryName -ResourceGroupName $resourceGroupName -Name "AzureStorageLinkedService" -DefinitionFile "C:\ADF\AzureStorageLinkedService.json"
Set-AzDataFactoryV2Dataset -DataFactoryName $dataFactoryName -ResourceGroupName $resourceGroupName -Name "BlobDataset" -DefinitionFile "C:\ADF\BlobDataset.json"
Set-AzDataFactoryV2Pipeline -DataFactoryName $dataFactoryName -ResourceGroupName $resourceGroupName -Name "Adfv2QuickStartPipeline" -DefinitionFile "C:\ADF\Adfv2QuickStartPipeline.json"
$runId = Invoke-AzDataFactoryV2Pipeline -DataFactoryName $dataFactoryName -ResourceGroupName $resourceGroupName -PipelineName "Adfv2QuickStartPipeline" -ParameterFile C:\ADF\PipelineParameters.json

# Poll the pipeline run until it finishes.
while ($True) {
    $run = Get-AzDataFactoryV2PipelineRun -ResourceGroupName $resourceGroupName -DataFactoryName $dataFactoryName -PipelineRunId $runId

    if ($run) {
        if ($run.Status -ne 'InProgress') {
            Write-Host "Pipeline run finished. The status is: " $run.Status -ForegroundColor "Yellow"
            $run
            break
        }
        Write-Host "Pipeline is running...status: InProgress" -ForegroundColor "Yellow"
    }

    Start-Sleep -Seconds 30
}

# Inspect the activity runs for the pipeline run.
Write-Host "Activity run details:" -ForegroundColor "Yellow"
$result = Get-AzDataFactoryV2ActivityRun -DataFactoryName $dataFactoryName -ResourceGroupName $resourceGroupName -PipelineRunId $runId -RunStartedAfter (Get-Date).AddMinutes(-30) -RunStartedBefore (Get-Date).AddMinutes(30)
$result

Write-Host "Activity 'Output' section:" -ForegroundColor "Yellow"
$result.Output -join "`r`n"

Write-Host "`nActivity 'Error' section:" -ForegroundColor "Yellow"
$result.Error -join "`r`n"
```

## Next steps

See other control flow activities supported by Data Factory:

- [If Condition Activity](control-flow-if-condition-activity.md)
- [Execute Pipeline Activity](control-flow-execute-pipeline-activity.md)
- [For Each Activity](control-flow-for-each-activity.md)
- [Get Metadata Activity](control-flow-get-metadata-activity.md)
- [Lookup Activity](control-flow-lookup-activity.md)
- [Web Activity](control-flow-web-activity.md)
