diff --git a/CaptureEncoder/AudioInterface.cs b/CaptureEncoder/AudioInterface.cs
new file mode 100644
index 0000000..359bb28
--- /dev/null
+++ b/CaptureEncoder/AudioInterface.cs
@@ -0,0 +1,25 @@
+using System;
+using System.Runtime.InteropServices;
+
+namespace CaptureEncoder
+{
+
+ // To populate an AudioFrame with audio data,
+ // you must get access to the underlying memory buffer
+    // of the audio frame. To do this you must initialize
+ // the IMemoryBufferByteAccess COM interface
+ // by adding the following code within your namespace.
+ //
+ [ComImport]
+ [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
+ [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
+ unsafe interface IMemoryBufferByteAccess
+ {
+ void GetBuffer(out byte* buffer, out uint capacity);
+ }
+ // You must also configure your project in Microsoft Visual Studio
+ // to allow the compilation of unsafe code
+ // by opening the project's Properties page,
+ // clicking the Build property page,
+ // and selecting the Allow Unsafe Code checkbox
+}
diff --git a/CaptureEncoder/CaptureEncoder.csproj b/CaptureEncoder/CaptureEncoder.csproj
index b9450c4..135a9e4 100644
--- a/CaptureEncoder/CaptureEncoder.csproj
+++ b/CaptureEncoder/CaptureEncoder.csproj
@@ -11,7 +11,7 @@
CaptureEncoder
en-US
UAP
- 10.0.18362.0
+ 10.0.17763.0
10.0.17763.0
14
512
@@ -27,6 +27,7 @@
DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP
prompt
4
+ true
AnyCPU
@@ -106,6 +107,7 @@
full
false
prompt
+ true
x64
@@ -121,6 +123,7 @@
PackageReference
+
diff --git a/CaptureEncoder/Encoder.cs b/CaptureEncoder/Encoder.cs
index f8074e9..18a9120 100644
--- a/CaptureEncoder/Encoder.cs
+++ b/CaptureEncoder/Encoder.cs
@@ -3,13 +3,21 @@
using System;
using System.Diagnostics;
+using System.Runtime.InteropServices;
+using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
+using Windows.Devices.Enumeration;
using Windows.Foundation;
using Windows.Graphics.Capture;
using Windows.Graphics.DirectX.Direct3D11;
+using Windows.Media;
+using Windows.Media.Audio;
+using Windows.Media.Capture;
using Windows.Media.Core;
+using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Media.Transcoding;
+using Windows.Storage;
using Windows.Storage.Streams;
namespace CaptureEncoder
@@ -23,8 +31,42 @@ public Encoder(IDirect3DDevice device, GraphicsCaptureItem item)
_isRecording = false;
CreateMediaObjects();
+
+
}
+ private async Task CreateAudioObjects()
+ {
+ AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
+ settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;
+ // create AudioGraph
+ var result = await AudioGraph.CreateAsync(settings);
+ if (result.Status != AudioGraphCreationStatus.Success)
+ {
+ Debug.WriteLine("AudioGraph creation error: " + result.Status.ToString());
+ return;
+ }
+ _audioGraph = result.Graph;
+
+            // create device input - a microphone
+ var deviceInputResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
+ if (deviceInputResult.Status != AudioDeviceNodeCreationStatus.Success)
+ {
+ Debug.WriteLine($"Audio Device Input unavailable because {deviceInputResult.Status.ToString()}");
+
+ return;
+ }
+ _deviceInputNode = deviceInputResult.DeviceInputNode;
+
+ // create output frame
+ _frameOutputNode = _audioGraph.CreateFrameOutputNode();
+ // increase volume of input
+ _deviceInputNode.OutgoingGain = 10;
+ _deviceInputNode.AddOutgoingConnection(_frameOutputNode);
+
+ }
+
+
public IAsyncAction EncodeAsync(IRandomAccessStream stream, uint width, uint height, uint bitrateInBps, uint frameRate)
{
return EncodeInternalAsync(stream, width, height, bitrateInBps, frameRate).AsAsyncAction();
@@ -44,8 +86,8 @@ private async Task EncodeInternalAsync(IRandomAccessStream stream, uint width, u
using (_frameGenerator)
{
var encodingProfile = new MediaEncodingProfile();
- encodingProfile.Container.Subtype = "MPEG4";
- encodingProfile.Video.Subtype = "H264";
+ encodingProfile.Container.Subtype = MediaEncodingSubtypes.Mpeg4;
+ encodingProfile.Video.Subtype = MediaEncodingSubtypes.H264;
encodingProfile.Video.Width = width;
encodingProfile.Video.Height = height;
encodingProfile.Video.Bitrate = bitrateInBps;
@@ -53,8 +95,22 @@ private async Task EncodeInternalAsync(IRandomAccessStream stream, uint width, u
encodingProfile.Video.FrameRate.Denominator = 1;
encodingProfile.Video.PixelAspectRatio.Numerator = 1;
encodingProfile.Video.PixelAspectRatio.Denominator = 1;
- var transcode = await _transcoder.PrepareMediaStreamSourceTranscodeAsync(_mediaStreamSource, stream, encodingProfile);
+                // Describe audio input (AAC/M4a matches the MPEG-4 container; MP3 is not a valid pairing here)
+                encodingProfile.Audio = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.Low).Audio;
+
+ // create audio graph
+ if (_audioGraph==null)
+ {
+ await CreateAudioObjects();
+ }
+
+ // add audio support
+ _audioDescriptor = new AudioStreamDescriptor(_audioGraph.EncodingProperties);
+ _mediaStreamSource.AddStreamDescriptor(_audioDescriptor);
+
+
+ var transcode = await _transcoder.PrepareMediaStreamSourceTranscodeAsync(_mediaStreamSource, stream, encodingProfile);
await transcode.TranscodeAsync();
}
}
@@ -76,13 +132,15 @@ public void Dispose()
_isRecording = false;
}
- private void DisposeInternal()
+ private void DisposeInternal()
{
_frameGenerator.Dispose();
+
}
private void CreateMediaObjects()
{
+
// Create our encoding profile based on the size of the item
int width = _captureItem.Size.Width;
int height = _captureItem.Size.Height;
@@ -93,35 +151,83 @@ private void CreateMediaObjects()
// Create our MediaStreamSource
_mediaStreamSource = new MediaStreamSource(_videoDescriptor);
- _mediaStreamSource.BufferTime = TimeSpan.FromSeconds(0);
+ _mediaStreamSource.CanSeek = true;
+ _mediaStreamSource.BufferTime = TimeSpan.FromMilliseconds(0);
_mediaStreamSource.Starting += OnMediaStreamSourceStarting;
_mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;
+ _mediaStreamSource.Closed += (s,e) => {
+ Debug.WriteLine("Stop AudioGraph");
+ _audioGraph?.Stop();
+
+ };
// Create our transcoder
_transcoder = new MediaTranscoder();
_transcoder.HardwareAccelerationEnabled = true;
}
- private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
+
+ unsafe private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
if (_isRecording && !_closed)
{
try
{
- using (var frame = _frameGenerator.WaitForNewFrame())
+
+ if (args.Request.StreamDescriptor.GetType() == typeof(VideoStreamDescriptor))
{
- if (frame == null)
+ // Request Video
+ using (var frame = _frameGenerator.WaitForNewFrame())
+ {
+ if (frame == null)
+ {
+ args.Request.Sample = null;
+ DisposeInternal();
+ return;
+ }
+ var timeStamp = frame.SystemRelativeTime- timeOffset;
+ var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
+ args.Request.Sample = sample;
+ }
+ }
+ else if (args.Request.StreamDescriptor.GetType() == typeof(AudioStreamDescriptor))
+ {
+ var request = args.Request;
+
+ var deferal = request.GetDeferral();
+
+ var frame = _frameOutputNode.GetFrame();
+ if (frame.Duration.GetValueOrDefault().TotalSeconds==0)
{
args.Request.Sample = null;
- DisposeInternal();
return;
}
+ using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
+ using (IMemoryBufferReference reference = buffer.CreateReference())
+ {
+ byte* dataInBytes;
+ uint capacityInBytes;
+ // Get the buffer from the AudioFrame
+ ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
+                        byte[] bytes = new byte[buffer.Length];
+                        Marshal.Copy((IntPtr)dataInBytes, bytes, 0, (int)buffer.Length);
+                        var data_buffer = WindowsRuntimeBufferExtensions.AsBuffer(bytes, 0, (int)buffer.Length);
+
+ var stamp = frame.RelativeTime.GetValueOrDefault();
+ var duration = frame.Duration.GetValueOrDefault();
+
+ var sample = MediaStreamSample.CreateFromBuffer(data_buffer, stamp);
+ sample.Duration = duration;
+ sample.KeyFrame = true;
+
+ request.Sample = sample;
+
+ }
- var timeStamp = frame.SystemRelativeTime;
+ deferal.Complete();
- var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
- args.Request.Sample = sample;
}
+
}
catch (Exception e)
{
@@ -139,11 +245,21 @@ private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaS
}
}
+
+
private void OnMediaStreamSourceStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
{
+ MediaStreamSourceStartingRequest request = args.Request;
+
using (var frame = _frameGenerator.WaitForNewFrame())
{
- args.Request.SetActualStartPosition(frame.SystemRelativeTime);
+ timeOffset = frame.SystemRelativeTime;
+ //request.SetActualStartPosition(frame.SystemRelativeTime);
+ }
+ _audioGraph?.Start();
+ using (var audioFrame = _frameOutputNode.GetFrame())
+ {
+ timeOffset = timeOffset + audioFrame.RelativeTime.GetValueOrDefault();
}
}
@@ -153,9 +269,17 @@ private void OnMediaStreamSourceStarting(MediaStreamSource sender, MediaStreamSo
private CaptureFrameWait _frameGenerator;
private VideoStreamDescriptor _videoDescriptor;
+ private AudioStreamDescriptor _audioDescriptor;
private MediaStreamSource _mediaStreamSource;
private MediaTranscoder _transcoder;
private bool _isRecording;
private bool _closed = false;
+
+ // audio graph and nodes
+ private AudioGraph _audioGraph;
+ private AudioDeviceInputNode _deviceInputNode;
+ private AudioFrameOutputNode _frameOutputNode;
+ private TimeSpan timeOffset = new TimeSpan();
+
}
}
diff --git a/SimpleRecorder/Package.appxmanifest b/SimpleRecorder/Package.appxmanifest
index ecfec7b..7d71832 100644
--- a/SimpleRecorder/Package.appxmanifest
+++ b/SimpleRecorder/Package.appxmanifest
@@ -24,5 +24,7 @@
+
+
\ No newline at end of file
diff --git a/SimpleRecorder/SimpleRecorder.csproj b/SimpleRecorder/SimpleRecorder.csproj
index aebe0a5..157a990 100644
--- a/SimpleRecorder/SimpleRecorder.csproj
+++ b/SimpleRecorder/SimpleRecorder.csproj
@@ -11,7 +11,7 @@
SimpleRecorder
en-US
UAP
- 10.0.18362.0
+ 10.0.17763.0
10.0.17763.0
14
512