6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,12 @@

# Changelog

[1.0.4] - 2025-04-29
* Synced flutter-webrtc v0.13.2
* [iOS/Android] feat: Media Recorder implementation for Android and iOS (#1810)
* [Windows] fix: Pick up registrar for plugin via the plugin registrar manager (#1752)
* [Linux] fix: add task runner for Linux. (#1821)

[1.0.3] - 2025-04-11
* Reverted to using `onSurfaceDestroyed` in `SurfaceTextureRenderer` for compatibility with Flutter 3.27.

@@ -4,6 +4,7 @@
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
@@ -13,12 +14,14 @@
import android.media.AudioDeviceInfo;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.net.Uri;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import android.os.ResultReceiver;
import android.provider.MediaStore;
import android.util.Log;
@@ -72,6 +75,9 @@
import org.webrtc.audio.JavaAudioDeviceModule;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -1046,22 +1052,64 @@ void startRecordingToFile(
mediaRecorders.append(id, mediaRecorder);
}

void stopRecording(Integer id) {
MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
if (mediaRecorder != null) {
mediaRecorder.stopRecording();
mediaRecorders.remove(id);
File file = mediaRecorder.getRecordFile();
if (file != null) {
ContentValues values = new ContentValues(3);
values.put(MediaStore.Video.Media.TITLE, file.getName());
values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
applicationContext
.getContentResolver()
.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values);
  void stopRecording(Integer id, String albumName) {
    try {
      MediaRecorderImpl mediaRecorder = mediaRecorders.get(id);
      if (mediaRecorder == null) {
        return;
      }
      mediaRecorder.stopRecording();
      mediaRecorders.remove(id);
      File file = mediaRecorder.getRecordFile();
      if (file == null) {
        return;
      }

      ContentValues values = new ContentValues();
      values.put(MediaStore.Video.Media.TITLE, file.getName());
      values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName());
      values.put(MediaStore.Video.Media.ALBUM, albumName);
      values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
      values.put(MediaStore.Video.Media.DATE_ADDED, System.currentTimeMillis() / 1000);
      values.put(MediaStore.Video.Media.DATE_TAKEN, System.currentTimeMillis());

      Uri collection;
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        // Android 10 (API 29) and above address MediaStore entries by RELATIVE_PATH;
        // IS_PENDING hides the entry from other apps until the copy below finishes.
        values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName);
        values.put(MediaStore.Video.Media.IS_PENDING, 1);
        collection = MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY);
      } else {
        // Android 9 (API 28) and below use the absolute DATA path instead.
        values.put(MediaStore.Video.Media.DATA,
            "/storage/emulated/0/Movies/" + albumName + "/" + file.getName());
        collection = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
      }

      ContentResolver resolver = applicationContext.getContentResolver();
      Uri uriSavedMedia = resolver.insert(collection, values);
      if (uriSavedMedia == null) {
        Log.e(TAG, "stopRecording: MediaStore insert failed");
        return;
      }

      // Stream the recorded file into the new MediaStore entry.
      try (ParcelFileDescriptor pfd = resolver.openFileDescriptor(uriSavedMedia, "w");
          FileOutputStream out = new FileOutputStream(pfd.getFileDescriptor());
          InputStream in = new FileInputStream(file)) {
        byte[] buf = new byte[8192];
        int len;
        while ((len = in.read(buf)) > 0) {
          out.write(buf, 0, len);
        }
      }

      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        // Clear the pending flag so the recording becomes visible to other apps.
        values.clear();
        values.put(MediaStore.Video.Media.IS_PENDING, 0);
        resolver.update(uriSavedMedia, values, null, null);
      }
    } catch (Exception e) {
      Log.e(TAG, "stopRecording: failed to save recording", e);
    }
  }
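
For reference, the pending-entry publish flow the method above relies on can be sketched in isolation. This is a minimal, hypothetical helper (`publishToMovies` is illustrative, not plugin API), assuming Android 10+ scoped storage:

import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Context;
import android.net.Uri;
import android.provider.MediaStore;
import java.io.*;

// Hypothetical helper (not part of the plugin): publish a finished recording
// under Movies/<albumName> using MediaStore's pending-entry pattern (API 29+).
static Uri publishToMovies(Context context, File file, String albumName) throws IOException {
  ContentResolver resolver = context.getContentResolver();
  ContentValues values = new ContentValues();
  values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName());
  values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
  values.put(MediaStore.Video.Media.RELATIVE_PATH, "Movies/" + albumName);
  values.put(MediaStore.Video.Media.IS_PENDING, 1); // hidden until the copy completes
  Uri uri = resolver.insert(
      MediaStore.Video.Media.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY), values);
  if (uri == null) throw new IOException("MediaStore insert failed");
  try (OutputStream out = resolver.openOutputStream(uri);
      InputStream in = new FileInputStream(file)) {
    byte[] buf = new byte[8192];
    int len;
    while ((len = in.read(buf)) > 0) out.write(buf, 0, len);
  }
  values.clear();
  values.put(MediaStore.Video.Media.IS_PENDING, 0); // publish the entry
  resolver.update(uri, values, null, null);
  return uri;
}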


@@ -793,7 +793,8 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
break;
case "stopRecordToFile":
Integer recorderId = call.argument("recorderId");
getUserMediaImpl.stopRecording(recorderId);
String albumName = call.argument("albumName");
getUserMediaImpl.stopRecording(recorderId, albumName);
result.success(null);
break;
case "captureFrame": {
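
For the handler above, the method-channel payload carries both arguments. A hypothetical host-side invocation (the real call originates in the plugin's Dart layer) would be shaped like:

import io.flutter.plugin.common.MethodChannel;
import java.util.HashMap;
import java.util.Map;

// Illustrative only: the argument map the "stopRecordToFile" handler expects.
void requestStopRecording(MethodChannel channel, int recorderId) {
  Map<String, Object> args = new HashMap<>();
  args.put("recorderId", recorderId);     // id issued when recording started
  args.put("albumName", "FlutterWebRTC"); // destination folder under Movies/ (assumed name)
  channel.invokeMethod("stopRecordToFile", args);
}
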
6 changes: 6 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink-Interface.h
@@ -0,0 +1,6 @@
void RTCAudioSinkCallback (void *object,
const void *audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames);
14 changes: 14 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink.h
@@ -0,0 +1,14 @@
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <WebRTC/WebRTC.h>

@interface FlutterRTCAudioSink : NSObject

@property (nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
@property (nonatomic) CMAudioFormatDescriptionRef format;

- (instancetype) initWithAudioTrack:(RTCAudioTrack*)audio;

- (void) close;

@end
67 changes: 67 additions & 0 deletions common/darwin/Classes/FlutterRTCAudioSink.mm
@@ -0,0 +1,67 @@
#import <AVFoundation/AVFoundation.h>
#import "FlutterRTCAudioSink.h"
#import "RTCAudioSource+Private.h"
#include "media_stream_interface.h"
#include "audio_sink_bridge.cpp"

@implementation FlutterRTCAudioSink {
AudioSinkBridge *_bridge;
webrtc::AudioSourceInterface* _audioSource;
}

- (instancetype) initWithAudioTrack:(RTCAudioTrack* )audio {
self = [super init];
rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSourcePtr = audio.source.nativeAudioSource;
_audioSource = audioSourcePtr.get();
_bridge = new AudioSinkBridge((void*)CFBridgingRetain(self));
_audioSource->AddSink(_bridge);
return self;
}

- (void) close {
_audioSource->RemoveSink(_bridge);
delete _bridge;
_bridge = nil;
_audioSource = nil;
}

void RTCAudioSinkCallback (void *object, const void *audio_data, int bits_per_sample, int sample_rate, size_t number_of_channels, size_t number_of_frames)
{
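// Wrap the raw PCM payload delivered by WebRTC in an AudioBufferList.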
AudioBufferList audioBufferList;
AudioBuffer audioBuffer;
audioBuffer.mData = (void*) audio_data;
audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
audioBuffer.mNumberChannels = number_of_channels;
audioBufferList.mNumberBuffers = 1;
audioBufferList.mBuffers[0] = audioBuffer;
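// Describe the payload: interleaved, packed, native-endian, signed-integer linear PCM.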
AudioStreamBasicDescription audioDescription;
audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
audioDescription.mBitsPerChannel = bits_per_sample;
audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
audioDescription.mChannelsPerFrame = number_of_channels;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
audioDescription.mFramesPerPacket = 1;
audioDescription.mReserved = 0;
audioDescription.mSampleRate = sample_rate;
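// Create a CMAudioFormatDescription and a CMSampleBuffer wrapping the PCM bytes,
// timed at one frame per sample at the source sample rate.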
CMAudioFormatDescriptionRef formatDesc;
CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil, &formatDesc);
CMSampleBufferRef buffer;
CMSampleTimingInfo timing;
timing.decodeTimeStamp = kCMTimeInvalid;
timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
timing.duration = CMTimeMake(1, sample_rate);
CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc, number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
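// Hand the finished sample buffer to the sink's registered callback.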
@autoreleasepool {
FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
sink.format = formatDesc;
if (sink.bufferCallback != nil) {
sink.bufferCallback(buffer);
} else {
NSLog(@"Buffer callback is nil");
}
}
}

@end
2 changes: 2 additions & 0 deletions common/darwin/Classes/FlutterRTCFrameCapturer.h
@@ -12,4 +12,6 @@
toPath:(NSString*)path
result:(FlutterResult)result;

+ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame *) frame;

@end
4 changes: 2 additions & 2 deletions common/darwin/Classes/FlutterRTCFrameCapturer.m
@@ -41,7 +41,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
CVPixelBufferRef pixelBufferRef;
bool shouldRelease;
if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
pixelBufferRef = [self convertToCVPixelBuffer:frame];
pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame];
shouldRelease = true;
} else {
pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer;
@@ -108,7 +108,7 @@ - (void)renderFrame:(nullable RTCVideoFrame*)frame {
});
}

- (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
+ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
CVPixelBufferRef outputPixelBuffer;
size_t w = (size_t)roundf(i420Buffer.width);
24 changes: 24 additions & 0 deletions common/darwin/Classes/FlutterRTCMediaRecorder.h
@@ -0,0 +1,24 @@
#if TARGET_OS_IPHONE
#import <Flutter/Flutter.h>
#elif TARGET_OS_OSX
#import <FlutterMacOS/FlutterMacOS.h>
#endif
#import <WebRTC/WebRTC.h>

@import Foundation;
@import AVFoundation;

@interface FlutterRTCMediaRecorder : NSObject <RTCVideoRenderer>

@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack;
@property(nonatomic, strong) NSURL* _Nonnull output;
@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter;
@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput;

- (instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video
audioTrack:(RTCAudioTrack* _Nullable)audio
outputFile:(NSURL* _Nonnull)out;

- (void)stop:(_Nonnull FlutterResult)result;

@end