|
| 1 | +--- |
| 2 | +title: include file |
| 3 | +description: C# Media Streaming quickstart |
| 4 | +services: azure-communication-services |
| 5 | +author: Kunaal |
| 6 | +ms.service: azure-communication-services |
| 7 | +ms.subservice: azure-communication-services |
| 8 | +ms.date: 10/25/2022 |
| 9 | +ms.topic: include
| 11 | +ms.author: kpunjabi |
| 12 | +--- |
| 13 | + |
| 14 | +## Prerequisites |
| 15 | +- Azure account with an active subscription, for details see [Create an account for free.](https://azure.microsoft.com/free/) |
| 16 | +- Azure Communication Services resource. See [Create an Azure Communication Services resource](../../../create-communication-resource.md?tabs=windows&pivots=platform-azp) |
| 17 | +- Create a new web service application using the [Call Automation SDK](../../callflows-for-customer-interactions.md).
| 18 | +- The latest [.NET library](https://dotnet.microsoft.com/download/dotnet-core) for your operating system. |
| 19 | +- [Apache Maven](https://maven.apache.org/download.cgi). |
| 20 | +- A websocket server that can receive media streams. |
| 21 | + |
| 22 | +## Set up a websocket server |
| 23 | +Azure Communication Services requires your server application to set up a WebSocket server to stream audio in real-time. WebSocket is a standardized protocol that provides a full-duplex communication channel over a single TCP connection. |
| 24 | +You can optionally use an Azure service, such as Azure Web Apps, that allows you to create an application to receive audio streams over a WebSocket connection. Follow this [quickstart](https://azure.microsoft.com/blog/introduction-to-websockets-on-windows-azure-web-sites/).
| 25 | + |
| 26 | +## Establish a call |
| 27 | +In this quickstart we assume that you're already familiar with starting calls. If you need to learn more about starting and establishing calls, you can follow our [quickstart](../../callflows-for-customer-interactions.md). For the purposes of this quickstart, we'll be going through the process of starting media streaming for both incoming calls and outbound calls. |
| 28 | + |
| 29 | +## Start media streaming - incoming call |
| 30 | +Your application will start receiving media streams once you answer the call and provide ACS with the WebSocket information. |
| 31 | + |
| 32 | +``` csharp |
// Configure media streaming: mixed audio delivered over a WebSocket connection.
var mediaStreamingOptions = new MediaStreamingOptions(
    new Uri("wss://testwebsocket.webpubsub.azure.com/client/hubs/media?accesstoken={access_token}"),
    MediaStreamingTransport.WebSocket,
    MediaStreamingContent.Audio,
    MediaStreamingAudioChannel.Mixed); // no trailing comma — C# argument lists do not allow one

// Answer the incoming call with media streaming enabled.
var answerCallOptions = new AnswerCallOptions(incomingCallContext, callbackUri: new Uri(callConfiguration.AppCallbackUrl)) {
    MediaStreamingOptions = mediaStreamingOptions
};
var response = await callingServerClient.AnswerCallAsync(answerCallOptions);
| 43 | +``` |
| 44 | + |
| 45 | +## Start media streaming - outbound call |
| 46 | +Your application will start receiving media streams once you create the call and provide ACS with the WebSocket information. |
| 47 | + |
| 48 | +``` csharp |
// Configure media streaming: mixed audio delivered over a WebSocket connection.
var mediaStreamingOptions = new MediaStreamingOptions(
    new Uri("wss://{yourwebsocketurl}"),
    MediaStreamingTransport.WebSocket,
    MediaStreamingContent.Audio,
    MediaStreamingAudioChannel.Mixed); // no trailing comma — C# argument lists do not allow one

// Create the outbound call with media streaming enabled.
var createCallOptions = new CreateCallOptions(
    callSource,
    new List<PhoneNumberIdentifier> { target },
    new Uri(callConfiguration.AppCallbackUrl)) {
    MediaStreamingOptions = mediaStreamingOptions
};
var createCallResult = await client.CreateCallAsync(createCallOptions);
| 61 | +``` |
| 62 | +## Handling media streams in your websocket server |
| 63 | +The following sample demonstrates how to listen to the media stream using your WebSocket server.
| 64 | + |
| 65 | +``` csharp |
// Minimal WebSocket server loop: accept the media-streaming connection and
// parse each incoming audio packet.
HttpListener httpListener = new HttpListener();
httpListener.Prefixes.Add("http://localhost:80/");
httpListener.Start();

while (true)
{
    HttpListenerContext httpListenerContext = await httpListener.GetContextAsync();

    // Reject plain HTTP requests — only WebSocket upgrade requests are valid here.
    if (!httpListenerContext.Request.IsWebSocketRequest)
    {
        httpListenerContext.Response.StatusCode = 400;
        httpListenerContext.Response.Close();
        continue;
    }

    WebSocketContext websocketContext;
    try
    {
        websocketContext = await httpListenerContext.AcceptWebSocketAsync(subProtocol: null);
        string ipAddress = httpListenerContext.Request.RemoteEndPoint.Address.ToString();
    }
    catch (Exception)
    {
        // Handshake failed — report a server error and stop listening.
        httpListenerContext.Response.StatusCode = 500;
        httpListenerContext.Response.Close();
        return;
    }

    WebSocket webSocket = websocketContext.WebSocket;
    try
    {
        while (webSocket.State == WebSocketState.Open || webSocket.State == WebSocketState.CloseSent)
        {
            byte[] receiveBuffer = new byte[2048];
            // Give up on a silent connection after 60 seconds.
            var cancellationToken = new CancellationTokenSource(TimeSpan.FromSeconds(60)).Token;
            WebSocketReceiveResult receiveResult =
                await webSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), cancellationToken);

            if (receiveResult.MessageType == WebSocketMessageType.Close)
            {
                continue; // the loop condition observes the closing state
            }

            // Decode only the bytes actually received rather than trimming
            // NUL padding from the whole buffer.
            var data = Encoding.UTF8.GetString(receiveBuffer, 0, receiveResult.Count);
            try
            {
                var json = JsonConvert.DeserializeObject<Audio>(data);
                if (json == null)
                {
                    continue;
                }

                var byteArray = json.AudioData;
                if (string.IsNullOrEmpty(json.ParticipantId))
                {
                    // Mixed audio data: no participant id is present.
                    if (string.IsNullOrEmpty(WebSocketData.FirstReceivedMixedAudioBufferTimeStamp))
                    {
                        WebSocketData.FirstReceivedMixedAudioBufferTimeStamp = json.Timestamp;
                    }
                    // Process byteArray (audioData) however you want.
                }
                else if (!json.IsSilence)
                {
                    // Unmixed audio data for a specific participant.
                    // (Switch patterns cannot match against non-constant ids,
                    // so compare directly.)
                    if (json.ParticipantId == participantRawId1)
                    {
                        // Process audio data for participant 1.
                    }
                    else if (json.ParticipantId == participantRawId2)
                    {
                        // Process audio data for participant 2.
                    }

                    if (string.IsNullOrEmpty(WebSocketData.FirstReceivedUnmixedAudioBufferTimeStamp))
                    {
                        WebSocketData.FirstReceivedUnmixedAudioBufferTimeStamp = json.Timestamp;
                    }
                }
            }
            catch
            {
                // Ignore packets that are not valid Audio JSON (sample code;
                // log these in production).
            }
        }
    }
    catch (Exception)
    {
        // Swallow transport errors for this sample; log them in production code.
    }
}
| 133 | +``` |
0 commit comments