diff --git a/XcodeLibraryProject/iVidCapPro/GPUImageMovieWriter.m b/XcodeLibraryProject/iVidCapPro/GPUImageMovieWriter.m index 72839bc..ecb95d0 100755 --- a/XcodeLibraryProject/iVidCapPro/GPUImageMovieWriter.m +++ b/XcodeLibraryProject/iVidCapPro/GPUImageMovieWriter.m @@ -61,8 +61,6 @@ @interface GPUImageMovieWriter () GLuint inputTextureForMovieRendering; - GLubyte *frameData; - CMTime startTime, previousFrameTime; BOOL isRecording; @@ -195,11 +193,6 @@ - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSStr - (void)dealloc; { [self destroyDataFBO]; - - if (frameData != NULL) - { - free(frameData); - } } #pragma mark - @@ -210,9 +203,7 @@ - (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings; isRecording = NO; self.enabled = YES; - frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4); -// frameData = (GLubyte *) calloc(videoSize.width * videoSize.height * 4, sizeof(GLubyte)); NSError *error = nil; assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error]; if (error != nil) diff --git a/XcodeLibraryProject/iVidCapPro/iVidCap.h b/XcodeLibraryProject/iVidCapPro/iVidCap.h index e707962..1165a06 100644 --- a/XcodeLibraryProject/iVidCapPro/iVidCap.h +++ b/XcodeLibraryProject/iVidCapPro/iVidCap.h @@ -46,7 +46,8 @@ static int Demo_FrameLimit = 10 * 30; // set later based on actual enum AudioCapture { No_Audio = 0, Audio = 1, - Audio_Plus_Mic = 2 + Audio_Plus_Mic = 2, + Audio_Mic = 3 }; // This enum must be kept in sync with the VideoDisposition enum in iVidCap.cs. @@ -75,7 +76,7 @@ enum VideoCaptureFramerateLock { Throttled = 2 }; -@interface ivcp_VideoRecorder : NSObject { +@interface ivcp_VideoRecorder : NSObject <AVAudioRecorderDelegate> { // Video attributes. @@ -124,6 +125,11 @@ enum VideoCaptureFramerateLock { AVAsset* userAudioAsset2; AVAsset* mixedAudioAsset; + // Mic audio recording. 
+ AVAudioRecorder * micAudioRecorder; + BOOL bMicAudioRecordFinished; + BOOL bMovieWriterFinished; + // Debug. BOOL showDebug; diff --git a/XcodeLibraryProject/iVidCapPro/iVidCap.mm b/XcodeLibraryProject/iVidCapPro/iVidCap.mm index 75951fd..f3ea726 100644 --- a/XcodeLibraryProject/iVidCapPro/iVidCap.mm +++ b/XcodeLibraryProject/iVidCapPro/iVidCap.mm @@ -310,6 +310,8 @@ - (void)dealloc { finalVideoFileName = nil; } + [self micAudioRecorderKill]; + strcpy(iVidCapGameObject, ""); [super dealloc]; @@ -587,11 +589,17 @@ -(BOOL) beginRecordingSession { glFinish(); [EAGLContext setCurrentContext: unity_context]; + if(captureAudio == Audio_Mic) { + [self micAudioRecorderStart]; + bMicAudioRecordFinished = NO; + } + // Set the video recording start time. recordStartTime = [[NSDate alloc] init]; [recordStartTime retain]; isRecording = true; + bMovieWriterFinished = NO; if (showDebug) NSLog(@"iVidCapPro - beginRecordingSession exiting."); @@ -625,11 +633,31 @@ - (int) endRecordingSession: (VideoDisposition) action AddAudioFile1:(NSString*) // When the movie writer is finished, we'll continue completing the recording // session. [movieWriter finishRecordingWithCompletionHandler:^{ - [self movieWriterCompleteHandler];}]; + bMovieWriterFinished = YES; + [self checkAllRecordingsAreFinished]; + }]; + + if(captureAudio == Audio_Mic) { + [self micAudioRecorderStop]; + } return 0; } +- (void)checkAllRecordingsAreFinished { + + BOOL bFinaliseVideo = YES; + bFinaliseVideo = bFinaliseVideo && bMovieWriterFinished; + if(captureAudio == Audio_Mic) { + bFinaliseVideo = bFinaliseVideo && bMicAudioRecordFinished; + } + if(bFinaliseVideo == NO) { + return; // still waiting on either the video or audio to finish before they can be mixed together. + } + + [self movieWriterCompleteHandler]; +} + // This is the continuation of endRecordingSession. // It's called when the GPUImage movie writer has completed writing the movie. 
- (void) movieWriterCompleteHandler { @@ -655,7 +683,7 @@ - (void) movieWriterCompleteHandler { if (showDebug) { NSLog(@"iVidCapPro - endRecordingSession tempVideoFileName=%@\n", tempVideoFileName); NSLog(@"iVidCapPro - endRecordingSession finalVideoFileName=%@\n", finalVideoFileName); - if (captureAudio == Audio || captureAudio == Audio_Plus_Mic) + if (captureAudio == Audio || captureAudio == Audio_Plus_Mic || captureAudio == Audio_Mic) NSLog(@"iVidCapPro - endRecordingSession capturedAudioFileName=%@\n", capturedAudioFileName); else NSLog(@"iVidCapPro - endRecordingSession capturedAudioFileName=not in use\n"); @@ -669,7 +697,7 @@ - (void) movieWriterCompleteHandler { NSLog(@"iVidCapPro - endRecordingSession userAudioFileName2=not in use\n"); } - if (captureAudio == Audio || captureAudio == Audio_Plus_Mic || userAudioFile1 != nil || userAudioFile2 != nil) { + if (captureAudio == Audio || captureAudio == Audio_Plus_Mic || captureAudio == Audio_Mic || userAudioFile1 != nil || userAudioFile2 != nil) { // We have audio. Mix the audio and video. [self createVideoWithAudio]; @@ -711,7 +739,7 @@ - (void) createVideoWithAudio { NSURL* userAudioFileURL2 = nil; NSURL* capturedAudioFileURL = nil; - if (captureAudio == Audio || captureAudio == Audio_Plus_Mic) { + if (captureAudio == Audio || captureAudio == Audio_Plus_Mic || captureAudio == Audio_Mic) { // We're capturing audio from scene. Get the URL for the file. 
capturedAudioFileURL = [self getDocumentsFileURL:capturedAudioFileName]; } @@ -1322,6 +1350,65 @@ - (void)exportDidFinish:(AVAssetExportSession*)session NSLog(@"iVidCapPro - exportDidFinish: exiting."); } +- (void)micAudioRecorderKill { + if(micAudioRecorder == nil) { + return; + } + [micAudioRecorder stop]; + [micAudioRecorder setDelegate:nil]; + [micAudioRecorder release]; + micAudioRecorder = nil; +} + +- (void)micAudioRecorderStart { + [self micAudioRecorderKill]; + + NSError * error = nil; + + AVAudioSession * audioSession = [AVAudioSession sharedInstance]; + [audioSession setCategory: AVAudioSessionCategoryPlayAndRecord + withOptions: AVAudioSessionCategoryOptionDefaultToSpeaker + error: &error]; + if(error){ + NSLog(@"audioSession: %@ %li %@", [error domain], [error code], [[error userInfo] description]); + return; + } + [audioSession setActive:YES error:&error]; + if(error){ + NSLog(@"audioSession: %@ %li %@", [error domain], [error code], [[error userInfo] description]); + return; + } + + NSMutableDictionary * recordSetting = [NSMutableDictionary dictionary]; + [recordSetting setValue :[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey]; + [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey]; + [recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey]; + [recordSetting setValue :[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey]; + [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey]; + [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey]; + + NSURL * url = [self getDocumentsFileURL:capturedAudioFileName]; + micAudioRecorder = [[ AVAudioRecorder alloc] initWithURL:url settings:recordSetting error:&error]; + if(error){ + NSLog(@"micAudioRecorder: %@ %li %@", [error domain], [error code], [[error userInfo] description]); + return; + } + + [micAudioRecorder setDelegate:self]; + [micAudioRecorder record]; +} + +- (void)micAudioRecorderStop { + [micAudioRecorder stop]; +} + +- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder + successfully:(BOOL)flag { + + bMicAudioRecordFinished = YES; + [self checkAllRecordingsAreFinished]; +} + @end diff --git a/iVidCapProUnityProject/Assets/Plugins/iVidCapPro/iVidCapPro.cs b/iVidCapProUnityProject/Assets/Plugins/iVidCapPro/iVidCapPro.cs index 2d769ee..435abd0 100644 --- a/iVidCapProUnityProject/Assets/Plugins/iVidCapPro/iVidCapPro.cs +++ b/iVidCapProUnityProject/Assets/Plugins/iVidCapPro/iVidCapPro.cs @@ -495,7 +495,13 @@ public enum CaptureAudio { /// /// Audio from the scene and microphone will be recorded in addition to video. /// - Audio_Plus_Mic = 2 + Audio_Plus_Mic = 2, + + + /// + /// Audio from the microphone only will be recorded in addition to video. + /// + Audio_Mic = 3 } ///