
Commit 49cc2d2

Merge branch 'main' of https://github.com/MicrosoftDocs/azure-docs-pr into uuf-269343
2 parents: 0081916 + e5e0e75

18 files changed: +135 −100 lines

articles/baremetal-infrastructure/workloads/nc2-on-azure/architecture.md

Lines changed: 1 addition & 0 deletions

@@ -118,6 +118,7 @@ When planning your NC2 on Azure design, use the following table to understand wh
 | Japan East | AN36P |
 | North Central US | AN36P |
 | Southeast Asia | AN36P |
+| UAE North | AN36P |
 | UK South | AN36P |
 | West Europe | AN36P |
 | West US 2 | AN36 |

articles/event-grid/event-schema-blob-storage.md

Lines changed: 6 additions & 0 deletions

@@ -101,6 +101,8 @@ These events are triggered when a client creates, replaces, or deletes a blob by
   "contentType": "image/jpeg",
   "contentLength": 105891,
   "blobType": "BlockBlob",
+  "accessTier": "Archive",
+  "previousTier": "Cool",
   "url": "https://my-storage-account.blob.core.windows.net/testcontainer/Auto.jpg",
   "sequencer": "000000000000000000000000000089A4000000000018d6ea",
   "storageDiagnostics": {
@@ -208,6 +210,8 @@ These events are triggered when a client creates, replaces, or deletes a blob by
   "contentType": "image/jpeg",
   "contentLength": 105891,
   "blobType": "BlockBlob",
+  "accessTier": "Archive",
+  "previousTier": "Cool",
   "url": "https://my-storage-account.blob.core.windows.net/testcontainer/Auto.jpg",
   "sequencer": "000000000000000000000000000089A4000000000018d6ea",
   "storageDiagnostics": {
@@ -1246,6 +1250,8 @@ The data object has the following properties:
 | `contentType` | string | The content type specified for the blob. |
 | `contentLength` | integer | The size of the blob in bytes. |
 | `blobType` | string | The type of blob. Valid values are either "BlockBlob" or "PageBlob". |
+| `accessTier` | string | The target tier of the blob. Appears only for the event BlobTierChanged. |
+| `previousTier` | string | The source tier of the blob. Appears only for the event BlobTierChanged. If the blob is inferring the tier from the storage account, this field will not appear. |
 | `contentOffset` | number | The offset in bytes of a write operation taken at the point where the event-triggering application completed writing to the file. <br>Appears only for events triggered on blob storage accounts that have a hierarchical namespace.|
 | `destinationUrl` |string | The url of the file that will exist after the operation completes. For example, if a file is renamed, the `destinationUrl` property contains the url of the new file name. <br>Appears only for events triggered on blob storage accounts that have a hierarchical namespace.|
 | `sourceUrl` |string | The url of the file that exists before the operation is done. For example, if a file is renamed, the `sourceUrl` contains the url of the original file name before the rename operation. <br>Appears only for events triggered on blob storage accounts that have a hierarchical namespace. |
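
For context, here is a minimal sketch (not part of this commit) of how a consumer might read the two new fields from a BlobTierChanged event's data object. It uses only the Go standard library; the struct, the trimmed sample payload, and the field selection are illustrative assumptions, not the Event Grid SDK.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// blobTierChangedData models only the fields of the BlobTierChanged data
// object that this sketch cares about; names follow the schema above.
type blobTierChangedData struct {
	AccessTier   string `json:"accessTier"`
	PreviousTier string `json:"previousTier"`
	URL          string `json:"url"`
}

func main() {
	// sample data object, trimmed to the fields used here (hypothetical values)
	payload := []byte(`{
		"accessTier": "Archive",
		"previousTier": "Cool",
		"url": "https://my-storage-account.blob.core.windows.net/testcontainer/Auto.jpg"
	}`)

	var data blobTierChangedData
	if err := json.Unmarshal(payload, &data); err != nil {
		panic(err)
	}

	// previousTier may be absent when the blob inferred its tier from the account
	if data.PreviousTier == "" {
		fmt.Printf("%s moved to %s (previous tier inferred from the account)\n", data.URL, data.AccessTier)
		return
	}

	fmt.Printf("%s moved from %s to %s\n", data.URL, data.PreviousTier, data.AccessTier)
}
```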

articles/event-hubs/event-hubs-go-get-started-send.md

Lines changed: 112 additions & 98 deletions

@@ -49,47 +49,58 @@ Here's the code to send events to an event hub. The main steps in the code are:
 package main
 
 import (
-    "context"
+	"context"
 
-    "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
+	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
 )
 
 func main() {
+	// create an Event Hubs producer client using a connection string to the namespace and the event hub
+	producerClient, err := azeventhubs.NewProducerClientFromConnectionString("NAMESPACE CONNECTION STRING", "EVENT HUB NAME", nil)
 
-    // create an Event Hubs producer client using a connection string to the namespace and the event hub
-    producerClient, err := azeventhubs.NewProducerClientFromConnectionString("NAMESPACE CONNECTION STRING", "EVENT HUB NAME", nil)
+	if err != nil {
+		panic(err)
+	}
 
-    if err != nil {
-        panic(err)
-    }
+	defer producerClient.Close(context.TODO())
 
-    defer producerClient.Close(context.TODO())
+	// create sample events
+	events := createEventsForSample()
 
-    // create sample events
-    events := createEventsForSample()
+	// create a batch object and add sample events to the batch
+	newBatchOptions := &azeventhubs.EventDataBatchOptions{}
 
-    // create a batch object and add sample events to the batch
-    newBatchOptions := &azeventhubs.EventDataBatchOptions{}
+	batch, err := producerClient.NewEventDataBatch(context.TODO(), newBatchOptions)
 
-    batch, err := producerClient.NewEventDataBatch(context.TODO(), newBatchOptions)
+	if err != nil {
+		panic(err)
+	}
 
-    for i := 0; i < len(events); i++ {
-        err = batch.AddEventData(events[i], nil)
-    }
+	for i := 0; i < len(events); i++ {
+		err = batch.AddEventData(events[i], nil)
 
-    // send the batch of events to the event hub
-    producerClient.SendEventDataBatch(context.TODO(), batch, nil)
+		if err != nil {
+			panic(err)
+		}
+	}
+
+	// send the batch of events to the event hub
+	err = producerClient.SendEventDataBatch(context.TODO(), batch, nil)
+
+	if err != nil {
+		panic(err)
+	}
 }
 
 func createEventsForSample() []*azeventhubs.EventData {
-    return []*azeventhubs.EventData{
-        {
-            Body: []byte("hello"),
-        },
-        {
-            Body: []byte("world"),
-        },
-    }
+	return []*azeventhubs.EventData{
+		{
+			Body: []byte("hello"),
+		},
+		{
+			Body: []byte("world"),
+		},
+	}
 }
 ```
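
A related setup the sample does not show: creating the producer with Microsoft Entra ID credentials instead of a connection string. The sketch below (not part of this commit) assumes the azidentity package and the azeventhubs.NewProducerClient constructor; the namespace and event hub names are placeholders.

```go
package main

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
)

func main() {
	// pick up whatever identity is available in the environment
	// (developer sign-in, managed identity, environment variables, and so on)
	credential, err := azidentity.NewDefaultAzureCredential(nil)

	if err != nil {
		panic(err)
	}

	// "NAMESPACE.servicebus.windows.net" and "EVENT HUB NAME" are placeholders
	producerClient, err := azeventhubs.NewProducerClient("NAMESPACE.servicebus.windows.net", "EVENT HUB NAME", credential, nil)

	if err != nil {
		panic(err)
	}

	defer producerClient.Close(context.TODO())
}
```

From here the batching and sending steps would stay the same as in the connection-string sample above.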

@@ -134,101 +145,104 @@ Here's the code to receive events from an event hub. The main steps in the code
 package main
 
 import (
-    "context"
-    "errors"
-    "fmt"
-    "time"
-
-    "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
-    "github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs/checkpoints"
-    "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container"
+	"context"
+	"errors"
+	"fmt"
+	"time"
+
+	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
+	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs/checkpoints"
+	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container"
 )
 
 func main() {
 
-    // create a container client using a connection string and container name
-    checkClient, err := container.NewClientFromConnectionString("AZURE STORAGE CONNECTION STRING", "CONTAINER NAME", nil)
-
-    // create a checkpoint store that will be used by the event hub
-    checkpointStore, err := checkpoints.NewBlobStore(checkClient, nil)
+	// create a container client using a connection string and container name
+	checkClient, err := container.NewClientFromConnectionString("AZURE STORAGE CONNECTION STRING", "CONTAINER NAME", nil)
+
+	if err != nil {
+		panic(err)
+	}
 
-    if err != nil {
-        panic(err)
-    }
+	// create a checkpoint store that will be used by the event hub
+	checkpointStore, err := checkpoints.NewBlobStore(checkClient, nil)
 
-    // create a consumer client using a connection string to the namespace and the event hub
-    consumerClient, err := azeventhubs.NewConsumerClientFromConnectionString("NAMESPACE CONNECTION STRING", "EVENT HUB NAME", azeventhubs.DefaultConsumerGroup, nil)
+	if err != nil {
+		panic(err)
+	}
 
-    if err != nil {
-        panic(err)
-    }
+	// create a consumer client using a connection string to the namespace and the event hub
+	consumerClient, err := azeventhubs.NewConsumerClientFromConnectionString("NAMESPACE CONNECTION STRING", "EVENT HUB NAME", azeventhubs.DefaultConsumerGroup, nil)
 
-    defer consumerClient.Close(context.TODO())
+	if err != nil {
+		panic(err)
+	}
 
-    // create a processor to receive and process events
-    processor, err := azeventhubs.NewProcessor(consumerClient, checkpointStore, nil)
+	defer consumerClient.Close(context.TODO())
 
-    if err != nil {
-        panic(err)
-    }
+	// create a processor to receive and process events
+	processor, err := azeventhubs.NewProcessor(consumerClient, checkpointStore, nil)
 
-    // for each partition in the event hub, create a partition client with processEvents as the function to process events
-    dispatchPartitionClients := func() {
-        for {
-            partitionClient := processor.NextPartitionClient(context.TODO())
+	if err != nil {
+		panic(err)
+	}
 
-            if partitionClient == nil {
-                break
-            }
+	// for each partition in the event hub, create a partition client with processEvents as the function to process events
+	dispatchPartitionClients := func() {
+		for {
+			partitionClient := processor.NextPartitionClient(context.TODO())
 
-            go func() {
-                if err := processEvents(partitionClient); err != nil {
-                    panic(err)
-                }
-            }()
-        }
-    }
+			if partitionClient == nil {
+				break
+			}
 
-    // run all partition clients
-    go dispatchPartitionClients()
+			go func() {
+				if err := processEvents(partitionClient); err != nil {
+					panic(err)
+				}
+			}()
+		}
+	}
 
-    processorCtx, processorCancel := context.WithCancel(context.TODO())
-    defer processorCancel()
+	// run all partition clients
+	go dispatchPartitionClients()
 
-    if err := processor.Run(processorCtx); err != nil {
-        panic(err)
-    }
+	processorCtx, processorCancel := context.WithCancel(context.TODO())
+	defer processorCancel()
+
+	if err := processor.Run(processorCtx); err != nil {
+		panic(err)
+	}
 }
 
 func processEvents(partitionClient *azeventhubs.ProcessorPartitionClient) error {
-    defer closePartitionResources(partitionClient)
-    for {
-        receiveCtx, receiveCtxCancel := context.WithTimeout(context.TODO(), time.Minute)
-        events, err := partitionClient.ReceiveEvents(receiveCtx, 100, nil)
-        receiveCtxCancel()
-
-        if err != nil && !errors.Is(err, context.DeadlineExceeded) {
-            return err
-        }
-
-        fmt.Printf("Processing %d event(s)\n", len(events))
-
-        for _, event := range events {
-            fmt.Printf("Event received with body %v\n", string(event.Body))
-        }
-
-        if len(events) != 0 {
-            if err := partitionClient.UpdateCheckpoint(context.TODO(), events[len(events)-1]); err != nil {
-                return err
-            }
-        }
-    }
+	defer closePartitionResources(partitionClient)
+	for {
+		receiveCtx, receiveCtxCancel := context.WithTimeout(context.TODO(), time.Minute)
+		events, err := partitionClient.ReceiveEvents(receiveCtx, 100, nil)
+		receiveCtxCancel()
+
+		if err != nil && !errors.Is(err, context.DeadlineExceeded) {
+			return err
+		}
+
+		fmt.Printf("Processing %d event(s)\n", len(events))
+
+		for _, event := range events {
+			fmt.Printf("Event received with body %v\n", string(event.Body))
+		}
+
+		if len(events) != 0 {
+			if err := partitionClient.UpdateCheckpoint(context.TODO(), events[len(events)-1], nil); err != nil {
+				return err
+			}
+		}
+	}
 }
 
 func closePartitionResources(partitionClient *azeventhubs.ProcessorPartitionClient) {
-    defer partitionClient.Close(context.TODO())
+	defer partitionClient.Close(context.TODO())
 }
-
 ```
 
 ## Run receiver and sender apps
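
One detail the sample leaves open is how processorCancel ever gets called before the process exits. Below is a minimal sketch (not part of this commit) of tying the processor's context to OS signals; it assumes a processor created exactly as in the sample above, and the package name and RunUntilSignaled helper are hypothetical.

```go
// Package sample sketches one way to stop the processor cleanly.
package sample

import (
	"context"
	"os"
	"os/signal"
	"syscall"

	"github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs"
)

// RunUntilSignaled runs a processor (created as in the sample above) until the
// process receives SIGINT or SIGTERM, then cancels the context that
// processor.Run is watching so the partition clients shut down.
func RunUntilSignaled(processor *azeventhubs.Processor) error {
	ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer cancel()

	// Run blocks until ctx is canceled or the processor stops with an error.
	return processor.Run(ctx)
}
```

In main, the processorCtx/processorCancel pair could then be replaced by a call such as RunUntilSignaled(processor), leaving the rest of the sample unchanged.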

articles/iot-operations/connect-to-cloud/concept-dataflow-conversions.md

Lines changed: 1 addition & 0 deletions

@@ -8,6 +8,7 @@ ms.topic: concept-article
 ms.date: 08/03/2024
 
 #CustomerIntent: As an operator, I want to understand how to use dataflow conversions to transform data.
+ms.service: azure-iot-operations
 ---
 
 # Convert data by using dataflow conversions

articles/iot-operations/connect-to-cloud/concept-dataflow-enrich.md

Lines changed: 1 addition & 0 deletions

@@ -8,6 +8,7 @@ ms.topic: concept-article
 ms.date: 08/13/2024
 
 #CustomerIntent: As an operator, I want to understand how to create a dataflow to enrich data sent to endpoints.
+ms.service: azure-iot-operations
 ---
 
 # Enrich data by using dataflows

articles/iot-operations/connect-to-cloud/concept-dataflow-mapping.md

Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@ ms.date: 09/24/2024
 ai-usage: ai-assisted
 
 #CustomerIntent: As an operator, I want to understand how to use the dataflow mapping language to transform data.
+ms.service: azure-iot-operations
 ---
 
 # Map data by using dataflows

articles/iot-operations/connect-to-cloud/howto-configure-adlsv2-endpoint.md

Lines changed: 1 addition & 0 deletions

@@ -10,6 +10,7 @@ ms.date: 10/16/2024
 ai-usage: ai-assisted
 
 #CustomerIntent: As an operator, I want to understand how to configure dataflow endpoints for Azure Data Lake Storage Gen2 in Azure IoT Operations so that I can send data to Azure Data Lake Storage Gen2.
+ms.service: azure-iot-operations
 ---
 
 # Configure dataflow endpoints for Azure Data Lake Storage Gen2

articles/iot-operations/connect-to-cloud/howto-configure-adx-endpoint.md

Lines changed: 1 addition & 0 deletions

@@ -10,6 +10,7 @@ ms.date: 10/16/2024
 ai-usage: ai-assisted
 
 #CustomerIntent: As an operator, I want to understand how to configure dataflow endpoints for Azure Data Explorer in Azure IoT Operations so that I can send data to Azure Data Explorer.
+ms.service: azure-iot-operations
 ---
 
 # Configure dataflow endpoints for Azure Data Explorer

articles/iot-operations/connect-to-cloud/howto-configure-dataflow-endpoint.md

Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@ ms.topic: how-to
 ms.date: 09/17/2024
 
 #CustomerIntent: As an operator, I want to understand how to configure source and destination endpoints so that I can create a dataflow.
+ms.service: azure-iot-operations
 ---
 
 # Configure dataflow endpoints

articles/iot-operations/connect-to-cloud/howto-configure-dataflow-profile.md

Lines changed: 1 addition & 0 deletions

@@ -9,6 +9,7 @@ ms.topic: how-to
 ms.date: 08/29/2024
 
 #CustomerIntent: As an operator, I want to understand how to I can configure a a dataflow profile to control a dataflow behavior.
+ms.service: azure-iot-operations
 ---
 
 # Configure dataflow profile
