diff --git a/README.md b/README.md
index 4899f6d..419d463 100755
--- a/README.md
+++ b/README.md
@@ -167,7 +167,8 @@ Several sample applications demonstrating different scenarios are available in t
- **LocalFunctions** – showcases the local function calling feature.
- **GetPaid** – extends local functions to simulate payment requests.
- **DependencyInjection** – illustrates using the library with .NET DI.
-- **BrowserBridge** – ASP.NET Core application bridging a browser WebRTC client to OpenAI.
+- **ASP.NET Get Started** – ASP.NET application bridging a browser WebRTC client to OpenAI.
+- **ASP.NET Local Function** – ASP.NET application that builds on the Get Started example and adds a local function to tailor OpenAI responses.
Each example folder contains its own README with usage instructions.
diff --git a/examples/AspNetGetStarted/Dockerfile b/examples/AspNetGetStarted/Dockerfile
index 42c371c..c6fcd92 100755
--- a/examples/AspNetGetStarted/Dockerfile
+++ b/examples/AspNetGetStarted/Dockerfile
@@ -47,7 +47,7 @@ COPY [".", "."]
# Publish the application
FROM build AS publish
-RUN dotnet publish "./BrowserBridge.csproj" -c $BUILD_CONFIGURATION -o /app/publish
+RUN dotnet publish "./AspNetGetStarted.csproj" -c $BUILD_CONFIGURATION -o /app/publish
# Stage 4: Final Image to Run the App
FROM base AS final
diff --git a/examples/AspNetLocalFunction/AspNetLocalFunction.csproj b/examples/AspNetLocalFunction/AspNetLocalFunction.csproj
new file mode 100755
index 0000000..5a78b60
--- /dev/null
+++ b/examples/AspNetLocalFunction/AspNetLocalFunction.csproj
@@ -0,0 +1,23 @@
+<Project Sdk="Microsoft.NET.Sdk.Web">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <LangVersion>12.0</LangVersion>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- NuGet package and project references omitted. -->
+  </ItemGroup>
+
+</Project>
diff --git a/examples/AspNetLocalFunction/Dockerfile b/examples/AspNetLocalFunction/Dockerfile
new file mode 100755
index 0000000..475d6d1
--- /dev/null
+++ b/examples/AspNetLocalFunction/Dockerfile
@@ -0,0 +1,79 @@
+# Stage 1: Build FFmpeg Image
+FROM sipsorcery/ffmpegbuild:7.0 AS ffmpeg
+
+# Stage 2: Base Image - Install FFmpeg dependencies (This will be cached)
+FROM ubuntu:24.04 AS base
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install the required libraries for FFmpeg in the final image (as root)
+RUN apt-get update && apt-get install -y \
+ libdrm2 \
+ libsdl2-2.0-0 \
+ libsndio7.0 \
+ libxvidcore4 \
+ libxv1 \
+ libass9 \
+ libvpx-dev \
+ libsdl2-dev \
+ libx264-dev \
+ libx265-dev \
+ libopus-dev \
+ libfreetype6-dev \
+ libvorbis-dev \
+ libxvidcore-dev \
+ libavutil-dev \
+ libssl-dev \
+ libavdevice-dev \
+ libfdk-aac-dev \
+ aspnetcore-runtime-8.0 \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+EXPOSE 8081
+
+# Stage 3: Build .NET Application (Only rebuilds if source code changes)
+FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
+
+ARG BUILD_CONFIGURATION=Release
+WORKDIR /src
+
+# Add local NuGet source (replace with your actual package version)
+#RUN mkdir -p /local-nuget
+#COPY ./local-nuget/*.nupkg /local-nuget/
+#RUN dotnet nuget add source /local-nuget --name local
+
+COPY [".", "."]
+
+# Publish the application
+FROM build AS publish
+RUN dotnet publish "./AspNetLocalFunction.csproj" -c $BUILD_CONFIGURATION -o /app/publish
+
+# Stage 4: Final Image to Run the App
+FROM base AS final
+
+WORKDIR /app
+
+# Copy the published app from the build image
+COPY --from=publish /app/publish .
+
+# Copy FFmpeg binaries and libraries from the FFmpeg build image
+COPY --from=ffmpeg /usr/local/bin/ffmpeg /usr/local/bin/
+COPY --from=ffmpeg /usr/local/bin/ffprobe /usr/local/bin/
+COPY --from=ffmpeg /usr/local/lib/libavcodec.so.61.3.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libavdevice.so.61.1.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libavfilter.so.10.1.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libavformat.so.61.1.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libavutil.so.59.8.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libpostproc.so.58.1.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libswresample.so.5.1.100 /usr/local/lib/
+COPY --from=ffmpeg /usr/local/lib/libswscale.so.8.1.100 /usr/local/lib/
+
+# Update library links
+RUN ldconfig
+
+# Ensure FFmpeg is available in the PATH for your app
+ENV PATH="/usr/local/bin:${PATH}"
+
+# Set entrypoint to run the .NET application
+ENTRYPOINT ["dotnet", "AspNetLocalFunction.dll"]
diff --git a/examples/AspNetLocalFunction/Program.cs b/examples/AspNetLocalFunction/Program.cs
new file mode 100755
index 0000000..c5e5a37
--- /dev/null
+++ b/examples/AspNetLocalFunction/Program.cs
@@ -0,0 +1,368 @@
+//-----------------------------------------------------------------------------
+// Filename: Program.cs
+//
+// Description: An example ASP.NET WebRTC application that can connect to OpenAI's
+// real-time API and use a local function to tailor the AI's voice responses.
+// https://platform.openai.com/docs/guides/realtime-webrtc.
+//
+// This demo builds on the AspNetGetStarted example and adds a local function:
+// https://platform.openai.com/docs/guides/function-calling
+//
+// Browser clients can connect directly to OpenAI. The reason to use a bridging
+// ASP.NET app is to control and utilise the interaction on the server side.
+// For example, the ASP.NET app could provide a local function to look up database
+// information based on a user request.
+//
+// Usage:
+// set OPENAI_API_KEY=your_openai_key
+// dotnet run
+//
+// Author(s):
+// Aaron Clauson (aaron@sipsorcery.com)
+//
+// History:
+// 20 Jul 2025 Aaron Clauson Created, Dublin, Ireland.
+//
+// License:
+// BSD 3-Clause "New" or "Revised" License and the additional
+// BSD BY-NC-SA restriction, see included LICENSE.md file.
+//-----------------------------------------------------------------------------
+
+using LanguageExt;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.AspNetCore.Http;
+using Microsoft.AspNetCore.Mvc;
+using Microsoft.Extensions.DependencyInjection;
+using Serilog;
+using Serilog.Extensions.Logging;
+using SIPSorcery.Net;
+using SIPSorcery.OpenAIWebRTC;
+using SIPSorcery.OpenAIWebRTC.Models;
+using SIPSorceryMedia.Abstractions;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace demo;
+
+class Program
+{
+ private static string? _stunUrl = string.Empty;
+ private static string? _turnUrl = string.Empty;
+
+ static async Task Main(string[] args)
+ {
+ Log.Logger = new LoggerConfiguration()
+ .MinimumLevel.Debug()
+ .Enrich.FromLogContext()
+ .WriteTo.Console()
+ .CreateLogger();
+
+ var factory = new SerilogLoggerFactory(Log.Logger);
+ SIPSorcery.LogFactory.Set(factory);
+
+ Log.Information("WebRTC OpenAI ASP.NET Local Function Demo Program");
+
+ var openAiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? string.Empty;
+ _stunUrl = Environment.GetEnvironmentVariable("STUN_URL");
+ _turnUrl = Environment.GetEnvironmentVariable("TURN_URL");
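+ // Optional: when set to "true" the SDP offer to the browser is held back until ICE candidate gathering has completed.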
+ bool.TryParse(Environment.GetEnvironmentVariable("WAIT_FOR_ICE_GATHERING_TO_SEND_OFFER"), out var waitForIceGatheringToSendOffer);
+
+ if (string.IsNullOrWhiteSpace(openAiKey))
+ {
+ Log.Logger.Error("Please provide your OpenAI key as an environment variable. For example: set OPENAI_API_KEY=");
+ return;
+ }
+
+ var builder = WebApplication.CreateBuilder();
+
+ builder.Host.UseSerilog();
+
+ builder.Services.AddLogging(builder =>
+ {
+ builder.AddSerilog(dispose: true);
+ });
+
+ builder.Services.AddOpenAIRealtimeWebRTC(openAiKey);
+
+ var app = builder.Build();
+
+ app.UseDefaultFiles();
+ app.UseStaticFiles();
+ var webSocketOptions = new WebSocketOptions
+ {
+ KeepAliveInterval = TimeSpan.FromMinutes(2)
+ };
+
+ app.UseWebSockets(webSocketOptions);
+
+ app.Map("/ws", async (HttpContext context,
+ [FromServices] IWebRTCEndPoint openAiWebRTCEndPoint) =>
+ {
+ Log.Debug("Web socket client connection established.");
+
+ if (context.WebSockets.IsWebSocketRequest)
+ {
+ var webSocket = await context.WebSockets.AcceptWebSocketAsync();
+
+ RTCConfiguration config = new RTCConfiguration
+ {
+ X_ICEIncludeAllInterfaceAddresses = true
+ };
+
+ var webSocketPeer = new WebRTCWebSocketPeerAspNet(
+ webSocket,
+ CreateBrowserPeerConnection,
+ config,
+ RTCSdpType.offer);
+
+ webSocketPeer.OfferOptions = new RTCOfferOptions
+ {
+ X_WaitForIceGatheringToComplete = waitForIceGatheringToSendOffer
+ };
+
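+ // Carry out the SDP offer/answer exchange with the browser over the web socket.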
+ await webSocketPeer.Run();
+
+ SetOpenAIPeerEventHandlers(openAiWebRTCEndPoint, webSocketPeer.RTCPeerConnection.DataChannels.First());
+
+ var openAiPeerTask = openAiWebRTCEndPoint.StartConnect(config);
+
+ await openAiPeerTask;
+
+ ConnectPeers(webSocketPeer.RTCPeerConnection, openAiWebRTCEndPoint);
+
+ Log.Debug("Web socket closing with WebRTC peer connection in state {state}.", webSocketPeer.RTCPeerConnection?.connectionState);
+ }
+ else
+ {
+ // Not a WebSocket request
+ context.Response.StatusCode = StatusCodes.Status400BadRequest;
+ }
+ });
+
+ await app.RunAsync();
+ }
+
+ private static void SetOpenAIPeerEventHandlers(IWebRTCEndPoint webrtcEndPoint, RTCDataChannel browserDataChannel)
+ {
+ webrtcEndPoint.OnPeerConnectionConnected += () =>
+ {
+ Log.Logger.Information("OpenAI WebRTC peer connection established.");
+
+ browserDataChannel.send("WebRTC connection established with OpenAI.");
+
+ // Trigger the conversation by sending a response create message.
+ var result = webrtcEndPoint.DataChannelMessenger.SendResponseCreate(RealtimeVoicesEnum.shimmer, "Say Hi!");
+ if (result.IsLeft)
+ {
+ Log.Logger.Error($"Failed to send response create message: {result.LeftAsEnumerable().First()}");
+ }
+ };
+
+ //webrtcEndPoint.OnDataChannelMessage += (dc, message) =>
+ //{
+ // if (message is RealtimeServerEventResponseAudioTranscriptDone done)
+ // {
+ // Log.Information($"Transcript done: {done.Transcript}");
+ // }
+ //};
+
+ webrtcEndPoint.OnDataChannelMessage += (dc, evt) => OnDataChannelMessage(dc, evt, browserDataChannel);
+ }
+
+ private static void ConnectPeers(RTCPeerConnection browserPc, IWebRTCEndPoint openAiEndPoint)
+ {
+ if (browserPc == null)
+ {
+ Log.Error("Browser peer connection is null.");
+ return;
+ }
+
+ openAiEndPoint.PeerConnection.Match(
+ pc =>
+ {
+ // Send RTP audio payloads received from the browser WebRTC peer connection to OpenAI.
+ browserPc.PipeAudioTo(pc);
+
+ // Send RTP audio payloads received from OpenAI to the browser WebRTC peer connection.
+ pc.PipeAudioTo(browserPc);
+
+ // If the browser peer connection closes we need to close the OpenAI peer connection too.
+ browserPc.OnClosed += () => pc.Close("Browser peer closed.");
+
+ // If the OpenAI peer connection closes we need to close the browser peer connection too.
+ pc.OnClosed += () => browserPc.Close("OpenAI peer closed.");
+ },
+ () => Log.Error("OpenAI peer connection is null.")
+ );
+ }
+
+ /// <summary>
+ /// Method to create the peer connection with the browser.
+ /// </summary>
+ private static Task<RTCPeerConnection> CreateBrowserPeerConnection(RTCConfiguration pcConfig)
+ {
+ pcConfig.iceServers = new List<RTCIceServer>();
+
+ if (!string.IsNullOrWhiteSpace(_stunUrl))
+ {
+ pcConfig.iceServers.Add(_stunUrl.ParseStunServer());
+ }
+
+ if (!string.IsNullOrWhiteSpace(_turnUrl))
+ {
+ pcConfig.iceServers.Add(_turnUrl.ParseStunServer());
+ }
+
+ var peerConnection = new RTCPeerConnection(pcConfig);
+
+ MediaStreamTrack audioTrack = new MediaStreamTrack(AudioCommonlyUsedFormats.OpusWebRTC, MediaStreamStatusEnum.SendRecv);
+ peerConnection.addTrack(audioTrack);
+
+ // This call is synchronous when the WebRTC connection is not yet connected.
+ _ = peerConnection.createDataChannel("browser").Result;
+
+ return Task.FromResult(peerConnection);
+ }
+
+ /// <summary>
+ /// Event handler for WebRTC data channel messages.
+ /// </summary>
+ private static void OnDataChannelMessage(RTCDataChannel dc, RealtimeEventBase serverEvent, RTCDataChannel browserDataChannel)
+ {
+ switch (serverEvent)
+ {
+ case RealtimeServerEventResponseFunctionCallArgumentsDone argumentsDone:
+ Log.Information($"Function Arguments done: {argumentsDone.ToJson()}\n{argumentsDone.Arguments}");
+ OnFunctionArgumentsDone(dc, argumentsDone);
+ break;
+
+ case RealtimeServerEventSessionCreated sessionCreated:
+ Log.Information($"Session created: {sessionCreated.ToJson()}");
+ OnSessionCreated(dc);
+ break;
+
+ case RealtimeServerEventSessionUpdated sessionUpdated:
+ Log.Information($"Session updated: {sessionUpdated.ToJson()}");
+ break;
+
+ case RealtimeServerEventResponseAudioTranscriptDone transcriptionDone:
+ Log.Information($"Transcript done: {transcriptionDone.Transcript}");
+ browserDataChannel.send($"AI: {transcriptionDone.Transcript?.Trim()}");
+ break;
+
+ default:
+ //logger.LogInformation($"Data Channel {serverEvent.Type} message received.");
+ break;
+ }
+ }
+
+ /// <summary>
+ /// Sends a session update message to add the get weather demo function.
+ /// </summary>
+ private static void OnSessionCreated(RTCDataChannel dc)
+ {
+ var sessionUpdate = new RealtimeClientEventSessionUpdate
+ {
+ EventID = Guid.NewGuid().ToString(),
+ Session = new RealtimeSession
+ {
+ Instructions = "You are a weather bot who favours brevity and accuracy.",
+ Tools = new List<RealtimeTool>
+ {
+ new RealtimeTool
+ {
+ Name = "get_weather",
+ Description = "Get the current weather.",
+ Parameters = new RealtimeToolParameters
+ {
+ Properties = new Dictionary<string, RealtimeToolProperty>
+ {
+ { "location", new RealtimeToolProperty { Type = "string" } }
+ },
+ Required = new List<string> { "location" }
+ }
+ }
+ }
+ }
+ };
+
+ Log.Information($"Sending OpenAI session update to data channel {dc.label}.");
+ Log.Debug(sessionUpdate.ToJson());
+
+ dc.send(sessionUpdate.ToJson());
+ }
+
+ private static void OnFunctionArgumentsDone(RTCDataChannel dc, RealtimeServerEventResponseFunctionCallArgumentsDone argsDone)
+ {
+ var result = argsDone.Name switch
+ {
+ "get_weather" => GetWeather(argsDone),
+ _ => "Unknown Function."
+ };
+
+ Log.Information($"Call {argsDone.Name} with args {argsDone.Arguments} result {result}.");
+
+ var resultConvItem = new RealtimeClientEventConversationItemCreate
+ {
+ EventID = Guid.NewGuid().ToString(),
+ Item = new RealtimeConversationItem
+ {
+ Type = RealtimeConversationItemTypeEnum.function_call_output,
+ CallID = argsDone.CallId,
+ Output = result
+ }
+ };
+
+ Log.Debug(resultConvItem.ToJson());
+ dc.send(resultConvItem.ToJson());
+
+ // Tell the AI to continue the conversation.
+ var responseCreate = new RealtimeClientEventResponseCreate
+ {
+ EventID = Guid.NewGuid().ToString(),
+ Response = new RealtimeResponseCreateParams
+ {
+ Instructions = "Please give me the answer.",
+ }
+ };
+
+ dc.send(responseCreate.ToJson());
+ }
+
+ /// <summary>
+ /// The local function to call and return the result to the AI to continue the conversation.
+ /// </summary>
+ private static string GetWeather(RealtimeServerEventResponseFunctionCallArgumentsDone argsDone)
+ {
+ var location = argsDone.Arguments.GetNamedArgumentValue("location") ?? string.Empty;
+
+ return location switch
+ {
+ string s when s.Contains("Canberra", StringComparison.OrdinalIgnoreCase) => "It's cloudy and 15 degrees.",
+ string s when s.Contains("Dublin", StringComparison.OrdinalIgnoreCase) => "It's raining and 7 degrees.",
+ string s when s.Contains("Hobart", StringComparison.OrdinalIgnoreCase) => "It's sunny and 25 degrees.",
+ string s when s.Contains("Melbourne", StringComparison.OrdinalIgnoreCase) => "It's cold and wet and 11 degrees.",
+ string s when s.Contains("Sydney", StringComparison.OrdinalIgnoreCase) => "It's humid and stormy and 30 degrees.",
+ string s when s.Contains("Perth", StringComparison.OrdinalIgnoreCase) => "It's hot and dry and 40 degrees.",
+ _ => "It's sunny and 20 degrees."
+ };
+ }
+}
+
+public static class StunServerExtensions
+{
+ public static RTCIceServer ParseStunServer(this string stunServer)
+ {
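+ // Expected format: "url[;username;password]", e.g. "turn:your.turn.server;username;password".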
+ var fields = stunServer.Split(';');
+
+ return new RTCIceServer
+ {
+ urls = fields[0],
+ username = fields.Length > 1 ? fields[1] : null,
+ credential = fields.Length > 2 ? fields[2] : null,
+ credentialType = RTCIceCredentialType.password
+ };
+ }
+}
\ No newline at end of file
diff --git a/examples/AspNetLocalFunction/Properties/launchSettings.json b/examples/AspNetLocalFunction/Properties/launchSettings.json
new file mode 100755
index 0000000..dcf4edf
--- /dev/null
+++ b/examples/AspNetLocalFunction/Properties/launchSettings.json
@@ -0,0 +1,12 @@
+{
+ "profiles": {
+ "AspNetLocalFunction": {
+ "commandName": "Project",
+ "launchBrowser": true,
+ "environmentVariables": {
+ "ASPNETCORE_ENVIRONMENT": "Development"
+ },
+ "applicationUrl": "https://localhost:57790;http://localhost:57791"
+ }
+ }
+}
\ No newline at end of file
diff --git a/examples/AspNetLocalFunction/README.md b/examples/AspNetLocalFunction/README.md
new file mode 100755
index 0000000..7da9ff3
--- /dev/null
+++ b/examples/AspNetLocalFunction/README.md
@@ -0,0 +1,78 @@
+# OpenAI WebRTC ASP.NET Local Function Calling Example
+
+This ASP.NET web application demonstrates how to bridge a browser WebRTC client to OpenAI's real-time API, enabling local function calling and relaying live transcripts back to the browser.
+
+## Features
+
+- **Browser-based UI**: Connect from any modern browser to interact with OpenAI's real-time WebRTC API via an ASP.NET application.
+- **Live Audio & Transcription**: Streams audio from your browser to OpenAI and displays live transcriptions.
+- **Local Function Calling**: Implements a local function (e.g., `get_weather`) that is invoked by OpenAI based on user speech.
+- **Data Channel Relay**: Transcripts and other events are relayed from OpenAI to the browser via a WebRTC data channel.
+- **Diagnostics & Status**: Visual panels for diagnostics and data channel status/messages in the browser UI.
+
+## How it Works
+
+1. The browser connects to the ASP.NET app via WebSocket and negotiates a WebRTC session.
+2. The ASP.NET app establishes a second WebRTC session with OpenAI's real-time API.
+3. Audio is piped between the browser and OpenAI.
+4. Transcription and function call events from OpenAI are relayed to the browser via a WebRTC data channel.
+5. The browser UI displays live transcriptions and data channel messages.
+6. Local functions (like `get_weather`) are handled in the ASP.NET app and the results are sent back to OpenAI, as shown in the examples below.
+
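+The tool registration happens when OpenAI raises the `session.created` event. `OnSessionCreated` in `Program.cs` replies with a `session.update` message over the data channel that declares the `get_weather` function and its single `location` parameter (trimmed):
+
+```csharp
+// Declare the get_weather tool so OpenAI knows it can request a function call.
+var sessionUpdate = new RealtimeClientEventSessionUpdate
+{
+    EventID = Guid.NewGuid().ToString(),
+    Session = new RealtimeSession
+    {
+        Instructions = "You are a weather bot who favours brevity and accuracy.",
+        Tools = new List<RealtimeTool>
+        {
+            new RealtimeTool
+            {
+                Name = "get_weather",
+                Description = "Get the current weather.",
+                Parameters = new RealtimeToolParameters
+                {
+                    Properties = new Dictionary<string, RealtimeToolProperty>
+                    {
+                        { "location", new RealtimeToolProperty { Type = "string" } }
+                    },
+                    Required = new List<string> { "location" }
+                }
+            }
+        }
+    }
+};
+
+// dc is the data channel established with OpenAI.
+dc.send(sessionUpdate.ToJson());
+```
+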
+## Usage
+
+```bash
+set OPENAI_API_KEY=your_openai_key
+# Optionally set STUN_URL and TURN_URL for ICE servers
+# set STUN_URL=stun:stun.cloudflare.com
+# set TURN_URL=turn:your.turn.server;username;password
+
+dotnet run
+```
+
+Then open your browser and navigate to `https://localhost:57790` (or the port shown in the console).
+
+## Browser UI Overview
+
+- **Start/Close**: Begin or end the WebRTC session and microphone access.
+- **Data Channel Panel**: Shows live messages and status (green/red icon) for the data channel.
+- **Diagnostics Panel**: (Optional) Toggle to view detailed connection and event logs.
+
+## Function Calling Flow
+
+```text
+[User Speaks in Browser] ──► [ASP.NET app ] ──► [Transcription by OpenAI] ──► [OpenAI Function Call Request]
+ ▼
+ [get_weather(location)]
+ ▼
+ [Local Function Execution in ASP.NET]
+ ▼
+ [Result Returned to OpenAI]
+ ▼
+ [AI Continues Response]
+```
+
+## Example Local Function
+
+```csharp
+private static string GetWeather(RealtimeServerEventResponseFunctionCallArgumentsDone argsDone)
+{
+ var location = argsDone.Arguments.GetNamedArgumentValue("location") ?? string.Empty;
+ return location switch
+ {
+ string s when s.Contains("Dublin") => "It's raining and 7 degrees.",
+ string s when s.Contains("Sydney") => "It's humid and stormy and 30 degrees.",
+ _ => "It's sunny and 20 degrees."
+ };
+}
+```
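+
+When the local function returns, `OnFunctionArgumentsDone` in `Program.cs` posts the result back to OpenAI as a `function_call_output` conversation item and then issues a `response.create` event so the AI continues speaking (trimmed):
+
+```csharp
+// Return the local function result against the call ID supplied by OpenAI.
+var resultConvItem = new RealtimeClientEventConversationItemCreate
+{
+    EventID = Guid.NewGuid().ToString(),
+    Item = new RealtimeConversationItem
+    {
+        Type = RealtimeConversationItemTypeEnum.function_call_output,
+        CallID = argsDone.CallId,
+        Output = result
+    }
+};
+dc.send(resultConvItem.ToJson());
+
+// Ask the AI to continue the conversation using the function result.
+var responseCreate = new RealtimeClientEventResponseCreate
+{
+    EventID = Guid.NewGuid().ToString(),
+    Response = new RealtimeResponseCreateParams
+    {
+        Instructions = "Please give me the answer."
+    }
+};
+dc.send(responseCreate.ToJson());
+```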
+
+## Project Structure
+
+- **Program.cs**: ASP.NET Core entry point, WebSocket/WebRTC negotiation, function call logic.
+- **wwwroot/index.html**: Browser UI for audio, transcript, and diagnostics.
+- **/src**: Core library for OpenAI WebRTC integration.
+
+## License
+
+BSD 3-Clause "New" or "Revised" License and an additional BY-NC-SA restriction. See `LICENSE.md` for details.
diff --git a/examples/AspNetLocalFunction/wwwroot/favicon.ico b/examples/AspNetLocalFunction/wwwroot/favicon.ico
new file mode 100644
index 0000000..c034564
Binary files /dev/null and b/examples/AspNetLocalFunction/wwwroot/favicon.ico differ
diff --git a/examples/AspNetLocalFunction/wwwroot/index.html b/examples/AspNetLocalFunction/wwwroot/index.html
new file mode 100755
index 0000000..9d61efe
--- /dev/null
+++ b/examples/AspNetLocalFunction/wwwroot/index.html
@@ -0,0 +1,413 @@
+<!-- Browser UI for the demo: start/close controls, remote audio playback, data channel
+     message panel and diagnostics panel. Full markup omitted. -->
\ No newline at end of file
diff --git a/examples/SIPSorcery.OpenAI.WebRTC.Examples.sln b/examples/SIPSorcery.OpenAI.WebRTC.Examples.sln
index 5db22e7..027a867 100755
--- a/examples/SIPSorcery.OpenAI.WebRTC.Examples.sln
+++ b/examples/SIPSorcery.OpenAI.WebRTC.Examples.sln
@@ -17,6 +17,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AliceAndBob", "AliceAndBob\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AspNetGetStarted", "AspNetGetStarted\AspNetGetStarted.csproj", "{1B845969-CEE6-4656-A97B-1CC5422A0CFF}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AspNetLocalFunction", "AspNetLocalFunction\AspNetLocalFunction.csproj", "{C03D7AB6-8410-02A8-41C3-022FF23860AC}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -51,6 +53,10 @@ Global
{1B845969-CEE6-4656-A97B-1CC5422A0CFF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1B845969-CEE6-4656-A97B-1CC5422A0CFF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1B845969-CEE6-4656-A97B-1CC5422A0CFF}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C03D7AB6-8410-02A8-41C3-022FF23860AC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C03D7AB6-8410-02A8-41C3-022FF23860AC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C03D7AB6-8410-02A8-41C3-022FF23860AC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C03D7AB6-8410-02A8-41C3-022FF23860AC}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE