
Commit 9614919

Merge pull request #321 from numandev1/fix/iphone-16-compression
fix: iphone 16 and pro max compression issue
2 parents 1cbe1cb + 6843e64

1 file changed (+17, -15 lines)

ios/Video/NextLevelSessionExporter.swift

Lines changed: 17 additions & 15 deletions
@@ -254,36 +254,31 @@ extension NextLevelSessionExporter {
         self._reader?.startReading()
         self._writer?.startSession(atSourceTime: self.timeRange.start)

-        let audioSemaphore = DispatchSemaphore(value: 0)
-        let videoSemaphore = DispatchSemaphore(value: 0)
+        let dispatchGroup = DispatchGroup()

         let videoTracks = asset.tracks(withMediaType: AVMediaType.video)
         if let videoInput = self._videoInput,
            let videoOutput = self._videoOutput,
            videoTracks.count > 0 {
+            dispatchGroup.enter()
             videoInput.requestMediaDataWhenReady(on: self._inputQueue, using: {
                 if self.encode(readySamplesFromReaderOutput: videoOutput, toWriterInput: videoInput) == false {
-                    videoSemaphore.signal()
+                    dispatchGroup.leave()
                 }
             })
-        } else {
-            videoSemaphore.signal()
-        }
+        }

         if let audioInput = self._audioInput,
            let audioOutput = self._audioOutput {
+            dispatchGroup.enter()
             audioInput.requestMediaDataWhenReady(on: self._inputQueue, using: {
                 if self.encode(readySamplesFromReaderOutput: audioOutput, toWriterInput: audioInput) == false {
-                    audioSemaphore.signal()
+                    dispatchGroup.leave()
                 }
             })
-        } else {
-            audioSemaphore.signal()
-        }
+        }

-        DispatchQueue.global().async {
-            audioSemaphore.wait()
-            videoSemaphore.wait()
+        dispatchGroup.notify(queue: .global()) {
             DispatchQueue.main.async {
                 self.finish()
             }
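
The first hunk replaces the pair of DispatchSemaphore waits with a single DispatchGroup: the exporter enters the group once for each track it actually starts encoding, leaves when that track's encode loop reports completion, and only then schedules finish(). The following is a minimal standalone sketch of that coordination pattern, not the exporter's own code; the per-track encode work is reduced to a placeholder and the function and queue names are illustrative.

import Foundation

// Sketch of the DispatchGroup pattern used in the hunk above: one enter() per
// pending piece of work, one leave() when it completes, and a notify block
// that runs only after every enter has been balanced by a leave.
func exportTracks(_ tracks: [String], completion: @escaping () -> Void) {
    let group = DispatchGroup()
    let inputQueue = DispatchQueue(label: "exporter.input")   // illustrative label

    for track in tracks {
        group.enter()                 // registered before the async work starts
        inputQueue.async {
            // Placeholder for requestMediaDataWhenReady + encode(readySamplesFromReaderOutput:)
            print("finished encoding \(track)")
            group.leave()             // balances the enter once this track is done
        }
    }

    // Unlike the removed semaphore version, nothing blocks a thread while
    // waiting: the completion is scheduled once the group's count reaches zero.
    group.notify(queue: .global()) {
        DispatchQueue.main.async(execute: completion)
    }
}

A practical consequence visible in the diff: the group is entered only for inputs that exist, so an asset without an audio track no longer needs the else branches whose sole job was to signal an unused semaphore.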
@@ -369,13 +364,20 @@ extension NextLevelSessionExporter {

     private func setupAudioOutput(withAsset asset: AVAsset) {
         let audioTracks = asset.tracks(withMediaType: AVMediaType.audio)
+        var audioTracksToUse: [AVAssetTrack] = []

         guard audioTracks.count > 0 else {
             self._audioOutput = nil
             return
         }
-
-        self._audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
+        // Remove APAC tracks
+        for audioTrack in audioTracks {
+            let mediaSubtypes = audioTrack.formatDescriptions.filter { CMFormatDescriptionGetMediaType($0 as! CMFormatDescription) == kCMMediaType_Audio }.map { CMFormatDescriptionGetMediaSubType($0 as! CMFormatDescription) }
+            for mediaSubtype in mediaSubtypes where mediaSubtype != kAudioFormatAPAC {
+                audioTracksToUse.append(audioTrack)
+            }
+        }
+        self._audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracksToUse, audioSettings: nil)
         self._audioOutput?.alwaysCopiesSampleData = false
         self._audioOutput?.audioMix = self.audioMix
         if let reader = self._reader,
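
The second hunk builds the AVAssetReaderAudioMixOutput from a filtered track list that excludes APAC (Apple Positional Audio Codec) format descriptions, which is what the "Remove APAC tracks" comment refers to and, per the commit title, what was breaking compression on the iPhone 16 and 16 Pro Max. Below is a hedged sketch of the same subtype check in isolation, assuming an already loaded AVAsset and an SDK new enough to define kAudioFormatAPAC; the helper name nonAPACAudioTracks is ours, not the exporter's.

import AVFoundation
import AudioToolbox
import CoreMedia

// Sketch of the APAC filter in isolation: keep only audio tracks that carry no
// APAC format description. `asset` is assumed to be fully loaded; the force
// casts mirror the committed code, since formatDescriptions is typed [Any].
func nonAPACAudioTracks(in asset: AVAsset) -> [AVAssetTrack] {
    let audioTracks = asset.tracks(withMediaType: .audio)
    return audioTracks.filter { track in
        let subtypes = track.formatDescriptions
            .map { $0 as! CMFormatDescription }
            .filter { CMFormatDescriptionGetMediaType($0) == kCMMediaType_Audio }
            .map { CMFormatDescriptionGetMediaSubType($0) }
        return !subtypes.contains(kAudioFormatAPAC)
    }
}

One small design note: filtering whole tracks, as above, also avoids the committed inner loop's behaviour of appending a track once per non-APAC format description; in practice a track usually carries a single format description, so the result is the same.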
