Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 25 additions & 0 deletions CaptureEncoder/AudioInterface.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
using System;
using System.Runtime.InteropServices;

namespace CaptureEncoder
{

    // COM interop definition of IMemoryBufferByteAccess.
    //
    // To populate an AudioFrame with audio data, you must get access to the
    // underlying memory buffer of the audio frame. To do this you must
    // initialize the IMemoryBufferByteAccess COM interface by declaring it
    // within your namespace, as below. The GUID is the well-known IID for
    // this interface and must not be changed.
    //
    [ComImport]
    [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    unsafe interface IMemoryBufferByteAccess
    {
        // Returns a raw pointer to the buffer's bytes and its capacity in bytes.
        // The pointer is only valid while the owning IMemoryBufferReference is alive.
        void GetBuffer(out byte* buffer, out uint capacity);
    }
    // You must also configure your project in Microsoft Visual Studio
    // to allow the compilation of unsafe code
    // by opening the project's Properties page,
    // clicking the Build property page,
    // and selecting the Allow Unsafe Code checkbox.
}
5 changes: 4 additions & 1 deletion CaptureEncoder/CaptureEncoder.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
<AssemblyName>CaptureEncoder</AssemblyName>
<DefaultLanguage>en-US</DefaultLanguage>
<TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
<TargetPlatformVersion Condition=" '$(TargetPlatformVersion)' == '' ">10.0.18362.0</TargetPlatformVersion>
<TargetPlatformVersion Condition=" '$(TargetPlatformVersion)' == '' ">10.0.17763.0</TargetPlatformVersion>
<TargetPlatformMinVersion>10.0.17763.0</TargetPlatformMinVersion>
<MinimumVisualStudioVersion>14</MinimumVisualStudioVersion>
<FileAlignment>512</FileAlignment>
Expand All @@ -27,6 +27,7 @@
<DefineConstants>DEBUG;TRACE;NETFX_CORE;WINDOWS_UWP</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
Expand Down Expand Up @@ -106,6 +107,7 @@
<DebugType>full</DebugType>
<UseVSHostingProcess>false</UseVSHostingProcess>
<ErrorReport>prompt</ErrorReport>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x64'">
<PlatformTarget>x64</PlatformTarget>
Expand All @@ -121,6 +123,7 @@
<RestoreProjectStyle>PackageReference</RestoreProjectStyle>
</PropertyGroup>
<ItemGroup>
<Compile Include="AudioInterface.cs" />
<Compile Include="CaptureFrameWait.cs" />
<Compile Include="Direct3D11Helpers.cs" />
<Compile Include="Encoder.cs" />
Expand Down
150 changes: 137 additions & 13 deletions CaptureEncoder/Encoder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,21 @@

using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Foundation;
using Windows.Graphics.Capture;
using Windows.Graphics.DirectX.Direct3D11;
using Windows.Media;
using Windows.Media.Audio;
using Windows.Media.Capture;
using Windows.Media.Core;
using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Media.Transcoding;
using Windows.Storage;
using Windows.Storage.Streams;

namespace CaptureEncoder
Expand All @@ -23,8 +31,42 @@ public Encoder(IDirect3DDevice device, GraphicsCaptureItem item)
_isRecording = false;

CreateMediaObjects();


}

// Builds the audio capture pipeline: microphone device input -> frame output node.
// On success, _audioGraph, _deviceInputNode and _frameOutputNode are all non-null.
// On any failure, _audioGraph is left null so callers (which test "_audioGraph == null"
// to decide whether audio is available) correctly treat audio as unavailable.
private async Task CreateAudioObjects()
{
    AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
    // Lowest-latency quantum so pulled audio frames track the capture clock closely.
    settings.QuantumSizeSelectionMode = QuantumSizeSelectionMode.LowestLatency;

    // Create the AudioGraph.
    var result = await AudioGraph.CreateAsync(settings);
    if (result.Status != AudioGraphCreationStatus.Success)
    {
        Debug.WriteLine("AudioGraph creation error: " + result.Status.ToString());
        return;
    }
    _audioGraph = result.Graph;

    // Create the device input node — a microphone.
    var deviceInputResult = await _audioGraph.CreateDeviceInputNodeAsync(MediaCategory.Other);
    if (deviceInputResult.Status != AudioDeviceNodeCreationStatus.Success)
    {
        Debug.WriteLine($"Audio Device Input unavailable because {deviceInputResult.Status.ToString()}");
        // Fix: previously the half-initialized graph was left assigned here,
        // so the "_audioGraph == null" check elsewhere passed while
        // _frameOutputNode was still null, leading to a NullReferenceException
        // the first time an audio sample was requested. Tear the graph down
        // and report audio as unavailable instead.
        _audioGraph.Dispose();
        _audioGraph = null;
        return;
    }
    _deviceInputNode = deviceInputResult.DeviceInputNode;

    // Create the output frame node we pull PCM frames from.
    _frameOutputNode = _audioGraph.CreateFrameOutputNode();
    // Increase volume of input (10x gain).
    // NOTE(review): a gain of 10 can clip loud microphones — confirm intended.
    _deviceInputNode.OutgoingGain = 10;
    _deviceInputNode.AddOutgoingConnection(_frameOutputNode);
}


public IAsyncAction EncodeAsync(IRandomAccessStream stream, uint width, uint height, uint bitrateInBps, uint frameRate)
{
return EncodeInternalAsync(stream, width, height, bitrateInBps, frameRate).AsAsyncAction();
Expand All @@ -44,17 +86,31 @@ private async Task EncodeInternalAsync(IRandomAccessStream stream, uint width, u
using (_frameGenerator)
{
var encodingProfile = new MediaEncodingProfile();
encodingProfile.Container.Subtype = "MPEG4";
encodingProfile.Video.Subtype = "H264";
encodingProfile.Container.Subtype = MediaEncodingSubtypes.Mpeg4;
encodingProfile.Video.Subtype = MediaEncodingSubtypes.H264;
encodingProfile.Video.Width = width;
encodingProfile.Video.Height = height;
encodingProfile.Video.Bitrate = bitrateInBps;
encodingProfile.Video.FrameRate.Numerator = frameRate;
encodingProfile.Video.FrameRate.Denominator = 1;
encodingProfile.Video.PixelAspectRatio.Numerator = 1;
encodingProfile.Video.PixelAspectRatio.Denominator = 1;
var transcode = await _transcoder.PrepareMediaStreamSourceTranscodeAsync(_mediaStreamSource, stream, encodingProfile);
// Describe audio input
encodingProfile.Audio = MediaEncodingProfile.CreateMp3(AudioEncodingQuality.Low).Audio;


// create audio graph
if (_audioGraph==null)
{
await CreateAudioObjects();
}

// add audio support
_audioDescriptor = new AudioStreamDescriptor(_audioGraph.EncodingProperties);
_mediaStreamSource.AddStreamDescriptor(_audioDescriptor);


var transcode = await _transcoder.PrepareMediaStreamSourceTranscodeAsync(_mediaStreamSource, stream, encodingProfile);
await transcode.TranscodeAsync();
}
}
Expand All @@ -76,13 +132,15 @@ public void Dispose()
_isRecording = false;
}

private void DisposeInternal()
// Releases the capture frame generator once recording has ended (called when
// no further frames are available or the encoder is disposed).
// NOTE(review): the AudioGraph and its nodes are not disposed here — the
// MediaStreamSource.Closed handler only calls Stop() on the graph. Confirm
// who owns final disposal of _audioGraph.
private void DisposeInternal()
{
_frameGenerator.Dispose();

}

private void CreateMediaObjects()
{

// Create our encoding profile based on the size of the item
int width = _captureItem.Size.Width;
int height = _captureItem.Size.Height;
Expand All @@ -93,35 +151,83 @@ private void CreateMediaObjects()

// Create our MediaStreamSource
_mediaStreamSource = new MediaStreamSource(_videoDescriptor);
_mediaStreamSource.BufferTime = TimeSpan.FromSeconds(0);
_mediaStreamSource.CanSeek = true;
_mediaStreamSource.BufferTime = TimeSpan.FromMilliseconds(0);
_mediaStreamSource.Starting += OnMediaStreamSourceStarting;
_mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;
_mediaStreamSource.Closed += (s,e) => {
Debug.WriteLine("Stop AudioGraph");
_audioGraph?.Stop();

};

// Create our transcoder
_transcoder = new MediaTranscoder();
_transcoder.HardwareAccelerationEnabled = true;
}

private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)

unsafe private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
if (_isRecording && !_closed)
{
try
{
using (var frame = _frameGenerator.WaitForNewFrame())

if (args.Request.StreamDescriptor.GetType() == typeof(VideoStreamDescriptor))
{
if (frame == null)
// Request Video
using (var frame = _frameGenerator.WaitForNewFrame())
{
if (frame == null)
{
args.Request.Sample = null;
DisposeInternal();
return;
}
var timeStamp = frame.SystemRelativeTime- timeOffset;
var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
args.Request.Sample = sample;
}
}
else if (args.Request.StreamDescriptor.GetType() == typeof(AudioStreamDescriptor))
{
var request = args.Request;

var deferal = request.GetDeferral();

var frame = _frameOutputNode.GetFrame();
if (frame.Duration.GetValueOrDefault().TotalSeconds==0)
{
args.Request.Sample = null;
DisposeInternal();
return;
}
using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
using (IMemoryBufferReference reference = buffer.CreateReference())
{
byte* dataInBytes;
uint capacityInBytes;
// Get the buffer from the AudioFrame
((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);
byte[] bytes = new byte[capacityInBytes];
Marshal.Copy((IntPtr)dataInBytes, bytes, 0, (int)capacityInBytes);
var data_buffer = WindowsRuntimeBufferExtensions.AsBuffer(bytes, 0, (int)capacityInBytes);

var stamp = frame.RelativeTime.GetValueOrDefault();
var duration = frame.Duration.GetValueOrDefault();

var sample = MediaStreamSample.CreateFromBuffer(data_buffer, stamp);
sample.Duration = duration;
sample.KeyFrame = true;

request.Sample = sample;

}

var timeStamp = frame.SystemRelativeTime;
deferal.Complete();

var sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, timeStamp);
args.Request.Sample = sample;
}

}
catch (Exception e)
{
Expand All @@ -139,11 +245,21 @@ private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaS
}
}



// MediaStreamSource.Starting handler. Anchors the stream's start position on
// the first captured video frame and records its timestamp in timeOffset so
// later samples can be rebased to t = 0, then starts the audio graph.
private void OnMediaStreamSourceStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
{
    using (var frame = _frameGenerator.WaitForNewFrame())
    {
        args.Request.SetActualStartPosition(frame.SystemRelativeTime);
        timeOffset = frame.SystemRelativeTime;
    }

    _audioGraph?.Start();

    // Fix: guard the frame-output node. If audio setup failed,
    // CreateAudioObjects returned early and _frameOutputNode is null; the
    // original unconditional GetFrame() call threw a NullReferenceException.
    if (_frameOutputNode != null)
    {
        using (var audioFrame = _frameOutputNode.GetFrame())
        {
            // Shift the offset by the audio clock's current position.
            // TODO(review): confirm this addition is the intended A/V sync
            // correction — it biases video timestamps by the audio frame time.
            timeOffset = timeOffset + audioFrame.RelativeTime.GetValueOrDefault();
        }
    }
}

Expand All @@ -153,9 +269,17 @@ private void OnMediaStreamSourceStarting(MediaStreamSource sender, MediaStreamSo
private CaptureFrameWait _frameGenerator;

private VideoStreamDescriptor _videoDescriptor;
private AudioStreamDescriptor _audioDescriptor;
private MediaStreamSource _mediaStreamSource;
private MediaTranscoder _transcoder;
private bool _isRecording;
private bool _closed = false;

// audio graph and nodes
private AudioGraph _audioGraph;
private AudioDeviceInputNode _deviceInputNode;
private AudioFrameOutputNode _frameOutputNode;
private TimeSpan timeOffset = new TimeSpan();

}
}
2 changes: 2 additions & 0 deletions SimpleRecorder/Package.appxmanifest
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,7 @@
</Applications>
<Capabilities>
<uap6:Capability Name="graphicsCapture" />
<uap:Capability Name="videosLibrary" />
<DeviceCapability Name="microphone" />
</Capabilities>
</Package>
2 changes: 1 addition & 1 deletion SimpleRecorder/SimpleRecorder.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
<AssemblyName>SimpleRecorder</AssemblyName>
<DefaultLanguage>en-US</DefaultLanguage>
<TargetPlatformIdentifier>UAP</TargetPlatformIdentifier>
<TargetPlatformVersion Condition=" '$(TargetPlatformVersion)' == '' ">10.0.18362.0</TargetPlatformVersion>
<TargetPlatformVersion Condition=" '$(TargetPlatformVersion)' == '' ">10.0.17763.0</TargetPlatformVersion>
<TargetPlatformMinVersion>10.0.17763.0</TargetPlatformMinVersion>
<MinimumVisualStudioVersion>14</MinimumVisualStudioVersion>
<FileAlignment>512</FileAlignment>
Expand Down