Skip to content

AVFoundation tvOS xcode26.0 b6

Rolf Bjarne Kvinge edited this page Aug 19, 2025 · 2 revisions

# AVFoundation.framework

diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	2025-07-27 03:47:36
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	2025-08-13 08:04:07
@@ -86,22 +86,13 @@
  */
 @property(nonatomic, copy, null_resettable) NSDictionary<NSString *, id> *audioSettings API_UNAVAILABLE(ios, macCatalyst, watchos, tvos, visionos);
 
-/*!
- @property spatialAudioChannelLayoutTag
- @abstract
-    Specifies the audio channel layout tag that describes the audio channel layout to be output by the AVCaptureAudioDataOutput.
- 
- @discussion
-    The value of this property is from the AudioChannelLayoutTag enumeration defined in CoreAudioBaseTypes.h. Currently, the only two supported values are kAudioChannelLayoutTag_Stereo or ( kAudioChannelLayoutTag_HOA_ACN_SN3D | 4 ) which will provide either a Stereo channel pair or four channels of First Order Ambisonic audio data output. The default value is kAudioChannelLayoutTag_Unknown which results in an AudioChannelLayout determined by the AVCaptureDeviceInput's configuration.
- 
-    The rules for allowed values in a given AVCaptureSession are as follows:
- 
-    When the associated AVCaptureDeviceInput's multichannelAudioMode property is set to AVCaptureMultichannelAudioModeFirstOrderAmbisonics, the AVCaptureSession can support up to two AVCaptureAudioDataOutput instances. If a single AVCaptureAudioDataOutput is present it can produce either four channels of First Order Ambisonic audio or two channels of Stereo audio. If two AVCaptureAudioDataOutputs are present, one of them must output four channels of First Order Ambisonic audio and the other must output two channels of Stereo audio.
- 
-    When the associated AVCaptureDeviceInput's multichannelAudioMode property is set to anything other than AVCaptureMultichannelAudioModeFirstOrderAmbisonics, there must be only one AVCaptureAudioDataOutput present in the AVCaptureSession with its spatialAudioChannelLayoutTag property set to kAudioChannelLayoutTag_Unknown or left at the default value.
- 
-    These rules are validated when a client calls -[AVCaptureSession startRunning:] or -[AVCaptureSession commitConfiguration:]. If the validation fails an exception will be thrown indicating the invalid setting and the session will not start running.
- */
+/// The audio channel layout tag of the audio sample buffers produced by the audio data output.
+///
+/// When you set your audio data output's associated ``AVCaptureDeviceInput/multichannelAudioMode`` property to ``AVCaptureMultichannelAudioModeFirstOrderAmbisonics``, the ``AVCaptureSession`` allows up to two ``AVCaptureAudioDataOutput`` instances to be connected to the First-order Ambisonics (FOA) input. If you connect a single ``AVCaptureAudioDataOutput`` instance, you must configure its ``AVCaptureAudioDataOutput/spatialAudioChannelLayoutTag`` property to produce either four channels of FOA audio or two channels of Stereo audio. If you connect two ``AVCaptureAudioDataOutput`` instances, you must configure one to output four channels of FOA audio and the other to output two channels of Stereo audio.
+///
+/// Thus, when you set your associated ``AVCaptureDeviceInput/multichannelAudioMode`` property to ``AVCaptureMultichannelAudioModeFirstOrderAmbisonics``, you must set your connected ``AVCaptureAudioDataOutput`` instance's ``AVCaptureAudioDataOutput/spatialAudioChannelLayoutTag`` property to either `kAudioChannelLayoutTag_Stereo` for stereo, or `(kAudioChannelLayoutTag_HOA_ACN_SN3D | 4)` for FOA (see <doc://com.apple.documentation/documentation/coreaudiotypes/audiochannellayouttag>). When you set your associated ``AVCaptureDeviceInput/multichannelAudioMode`` to any other value, the ``AVCaptureSession`` only supports one ``AVCaptureAudioDataOutput``, and you may only set ``AVCaptureAudioDataOutput/spatialAudioChannelLayoutTag`` to `kAudioChannelLayoutTag_Unknown` (the default value).
+///
+/// Your ``AVCaptureSession`` validates your app's adherence to the above rules when you call ``AVCaptureSession/startRunning`` or ``AVCaptureSession/commitConfiguration`` and throws a `NSInvalidArgumentException` if necessary.
 @property(nonatomic) AudioChannelLayoutTag spatialAudioChannelLayoutTag API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /*!
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2025-07-27 03:23:11
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2025-08-08 03:45:02
@@ -1140,47 +1140,32 @@
  */
 @property(nonatomic) CGPoint focusPointOfInterest;
 
-/*!
- @property focusRectOfInterestSupported
- @abstract
-    Indicates whether the receiver supports focus rectangles of interest.
- 
- @discussion
-    The receiver's focusRectOfInterestSupported property can only be set if this property returns YES.
- */
+/// Whether the receiver supports focus rectangles of interest.
+///
+/// You may only set the device's ``focusRectOfInterest`` property if this property returns `true`.
 @property(nonatomic, readonly, getter=isFocusRectOfInterestSupported) BOOL focusRectOfInterestSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property minFocusRectOfInterestSize
- @abstract
-    Returns the minimum size that can be used when specifying a rectangle of interest.
- 
- @discussion
-    The size returned is in normalized coordinates, and will depend on the current active format. If isFocusRectOfInterestSupported returns NO, this property will return { 0, 0 }.
- */
+/// The minimum size you may use when specifying a rectangle of interest.
+///
+/// The size returned is in normalized coordinates, and depends on the current ``AVCaptureDevice/activeFormat``. If ``focusRectOfInterestSupported`` returns `false`, this property returns { 0, 0 }.
 @property(nonatomic, readonly) CGSize minFocusRectOfInterestSize API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property focusRectOfInterest
- @abstract
-    Indicates current focus rectangle of interest of the receiver, if it has one.
- 
- @discussion
-    The value of this property is a CGRect that determines the receiver's focus rectangle of interest, if it has one. It is used as an alternative to -setFocusPointOfInterest:, as it allows for both a location and size to be specified. A value of CGRectMake(0, 0, 1, 1) indicates that the receiver should use the entire field of view when determining the focus, while CGRectMake(0, 0, 0.25, 0.25) would indicate the top left sixteenth, and CGRectMake(0.75, 0.75, 0.25, 0.25) would indicate the bottom right sixteenth. -setFocusRectOfInterest: throws an NSInvalidArgumentException if isFocusRectOfInterestSupported returns NO. -setFocusRectOfInterest: throws an NSInvalidArgumentException if the size of the provided rectangle is smaller than that returned by minFocusRectOfInterestSize. -setFocusRectOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. -setFocusRectOfInterest: will update the receiver's focusPointOfInterest to be the center of the rectangle of interest. If the client later sets the receiver's focusPointOfInterest, the focusRectOfInterest will reset to the default rectangle of interest for the new focus point of interest. If the client changes the activeFormat, the point of interest and rectangle of interest will revert to their default values. Clients can observe automatic changes to the receiver's focusRectOfInterest by key value observing this property. Note that setting focusRectOfInterest alone does not initiate a focus operation. After setting focusRectOfInterest, call -setFocusMode: to apply the new rectangle of interest.
- */
+/// The device's current focus rectangle of interest, if it has one.
+///
+/// The value of this property is a ``CGRect`` determining the device's focus rectangle of interest. Use this as an alternative to setting ``focusPointOfInterest``, as it allows you to specify both a location and size. For example, a value of `CGRectMake(0, 0, 1, 1)` tells the device to use the entire field of view when determining the focus, while `CGRectMake(0, 0, 0.25, 0.25)` indicates the top left sixteenth, and `CGRectMake(0.75, 0.75, 0.25, 0.25)` indicates the bottom right sixteenth. Setting ``focusRectOfInterest`` throws an `NSInvalidArgumentException` if ``focusRectOfInterestSupported`` returns `false`. Setting ``focusRectOfInterest`` throws an `NSInvalidArgumentException` if your provided rectangle's size is smaller than the ``minFocusRectOfInterestSize``. Setting ``focusRectOfInterest`` throws an `NSGenericException` if you call it without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``. Setting ``focusRectOfInterest`` updates the device's ``focusPointOfInterest`` to the center of your provided rectangle of interest. If you later set the device's ``focusPointOfInterest``, the ``focusRectOfInterest`` resets to the default sized rectangle of interest for the new focus point of interest. If you change your ``AVCaptureDevice/activeFormat``, the point of interest and rectangle of interest both revert to their default values. You can observe automatic changes to the device's ``focusRectOfInterest`` by key-value observing this property.
+///
+/// - Note: Setting ``focusRectOfInterest`` alone does not initiate a focus operation. After setting ``focusRectOfInterest``, set ``focusMode`` to apply the new rectangle of interest.
 @property(nonatomic) CGRect focusRectOfInterest API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @method defaultRectForFocusPointOfInterest:
- @abstract
-    Returns the default rectangle of interest that is used for a given focus point of interest.
- 
- @param pointOfInterest
-    Point of interest for which we are returning the default rectangle of interest.
- 
- @discussion
-    Pass (0.5, 0.5) to get the focus rectangle of interest used for the default focus point of interest at (0.5, 0.5); note that the particular default rectangle returned will depend on the current focus mode. This method returns CGRectNull if isFocusRectOfInterestSupported returns NO.
- */
+/// The default rectangle of interest used for a given focus point of interest.
+///
+/// - Parameter pointOfInterest: The point of interest for which you want the default rectangle of interest.
+///
+/// For example, pass `(0.5, 0.5)` to get the focus rectangle of interest used for the default focus point of interest at `(0.5, 0.5)`.
+///
+/// - Note: The particular default rectangle returned depends on the current focus mode.
+///
+/// This method returns `CGRectNull` if ``focusRectOfInterestSupported`` returns `false`.
 - (CGRect)defaultRectForFocusPointOfInterest:(CGPoint)pointOfInterest API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /*!
@@ -1279,7 +1264,7 @@
  @param lensPosition
     The lens position, as described in the documentation for the lensPosition property. A value of AVCaptureLensPositionCurrent can be used to indicate that the caller does not wish to specify a value for lensPosition.
  @param handler
-    A block to be called when lensPosition has been set to the value specified and focusMode is set to AVCaptureFocusModeLocked. If setFocusModeLockedWithLensPosition:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
+    A block to be called when lensPosition has been set to the value specified and focusMode is set to AVCaptureFocusModeLocked. If setFocusModeLockedWithLensPosition:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
  
  @discussion
     This is the only way of setting lensPosition. This method throws an NSRangeException if lensPosition is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
@@ -1296,66 +1281,34 @@
  */
 @property(nonatomic, readonly) NSInteger minimumFocusDistance API_AVAILABLE(macos(12.0), ios(15.0), macCatalyst(15.0), tvos(17.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @enum AVCaptureCinematicVideoFocusMode
- @abstract
-   Constants indicating the focus behavior when recording a Cinematic Video.
- 
- @constant AVCaptureCinematicVideoFocusModeNone
-    Indicates that no focus mode is specified, in which case weak focus is used as default.
- @constant AVCaptureCinematicVideoFocusModeStrong
-    Indicates that the subject should remain in focus until it exits the scene.
- @constant AVCaptureCinematicVideoFocusModeWeak
-    Indicates that the Cinematic Video algorithm should automatically adjust focus according to the prominence of the subjects in the scene.
- */
+/// Constants indicating the focus behavior when recording a Cinematic Video.
 typedef NS_ENUM(NSInteger, AVCaptureCinematicVideoFocusMode) {
+    /// Indicates that no focus mode is specified, in which case weak focus is used as default.
     AVCaptureCinematicVideoFocusModeNone   = 0,
+    /// Indicates that the subject should remain in focus until it exits the scene.
     AVCaptureCinematicVideoFocusModeStrong = 1,
+    /// Indicates that the Cinematic Video algorithm should automatically adjust focus according to the prominence of the subjects in the scene.
     AVCaptureCinematicVideoFocusModeWeak   = 2,
 } NS_SWIFT_NAME(AVCaptureDevice.CinematicVideoFocusMode) API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @method setCinematicVideoTrackingFocusWithDetectedObjectID:focusMode:
- @abstract
-    Focus on and start tracking a detected object.
- 
- @param detectedObjectID
-    ID of the detected object.
- 
- @param focusMode
-    Specify whether to focus strongly or weakly.
- 
- */
+/// Focus on and start tracking a detected object.
+///
+/// - Parameter detectedObjectID: The ID of the detected object.
+/// - Parameter focusMode: Specify whether to focus strongly or weakly.
 - (void)setCinematicVideoTrackingFocusWithDetectedObjectID:(NSInteger)detectedObjectID focusMode:(AVCaptureCinematicVideoFocusMode)focusMode NS_SWIFT_NAME(setCinematicVideoTrackingFocus(detectedObjectID:focusMode:)) API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @method setCinematicVideoTrackingFocusAtPoint:focusMode:
- @abstract
-    Focus on and start tracking an object if it can be detected at the region specified by the point.
- 
- @param point
-    A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
- 
- @param focusMode
-    Specify whether to focus strongly or weakly.
- 
- */
+/// Focus on and start tracking an object if it can be detected at the region specified by the point.
+///
+/// - Parameter point: A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
+/// - Parameter focusMode: Specify whether to focus strongly or weakly.
 - (void)setCinematicVideoTrackingFocusAtPoint:(CGPoint)point focusMode:(AVCaptureCinematicVideoFocusMode)focusMode NS_SWIFT_NAME(setCinematicVideoTrackingFocus(at:focusMode:)) API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @method setCinematicVideoFixedFocusAtPoint:focusMode:
- @abstract
-    Fix focus at a distance.
- 
- @param point
-    A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
- 
- @param focusMode
-    Specify whether to focus strongly or weakly.
- 
- @discussion
-    The distance at which focus is set is determined internally using signals such as depth data.
- */
+/// Fix focus at a distance.
+///
+/// - Parameter point: A normalized point of interest (i.e., [0,1]) in the coordinate space of the device.
+/// - Parameter focusMode: Specify whether to focus strongly or weakly.
+///
+/// The distance at which focus is set is determined internally using signals such as depth data.
 - (void)setCinematicVideoFixedFocusAtPoint:(CGPoint)point focusMode:(AVCaptureCinematicVideoFocusMode)focusMode NS_SWIFT_NAME(setCinematicVideoFixedFocus(at:focusMode:)) API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
@@ -1431,47 +1384,30 @@
  */
 @property(nonatomic) CGPoint exposurePointOfInterest;
 
-/*!
- @property exposureRectOfInterestSupported
- @abstract
-    Indicates whether the receiver supports exposure rectangles of interest.
- 
- @discussion
-    The receiver's exposureRectOfInterestSupported property can only be set if this property returns YES.
- */
+/// Whether the device supports exposure rectangles of interest.
+///
+/// You may only set the device's ``exposureRectOfInterest`` property if this property returns `true`.
 @property(nonatomic, readonly, getter=isExposureRectOfInterestSupported) BOOL exposureRectOfInterestSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property minExposureRectOfInterestSize
- @abstract
-    Returns the minimum size that can be used when specifying a rectangle of interest.
- 
- @discussion
-    The size returned is in normalized coordinates, and will depend on the current active format. If isExposureRectOfInterestSupported returns NO, this property will return { 0, 0 }.
- */
+/// The minimum size you may use when specifying a rectangle of interest.
+///
+/// The size returned is in normalized coordinates, and depends on the current ``AVCaptureDevice/activeFormat``. If ``exposureRectOfInterestSupported`` returns `false`, this property returns { 0, 0 }.
 @property(nonatomic, readonly) CGSize minExposureRectOfInterestSize API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property exposureRectOfInterest
- @abstract
-    Indicates current exposure rectangle of interest of the receiver, if it has one.
- 
- @discussion
-    The value of this property is a CGRect that determines the receiver's exposure rectangle of interest, if it has one. It is used as an alternative to -setExposurePointOfInterest:, as it allows for both a location and size to be specified. A value of CGRectMake(0, 0, 1, 1) indicates that the receiver should use the entire field of view when determining the exposure, while CGRectMake(0, 0, 0.25, 0.25) would indicate the top left sixteenth, and CGRectMake(0.75, 0.75, 0.25, 0.25) would indicate the bottom right sixteenth. -setExposureRectOfInterest: throws an NSInvalidArgumentException if isExposureRectOfInterestSupported returns NO. -setExposureRectOfInterest: throws an NSInvalidArgumentException if the size of the provided rectangle is smaller than that returned by minExposureRectOfInterestSize. -setExposureRectOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. -setExposureRectOfInterest: will update the receiver's exposurePointOfInterest to be the center of the rectangle of interest. If the client later sets the receiver's exposurePointOfInterest, the exposureRectOfInterest will reset to the default rectangle of interest for the new exposure point of interest. If the client changes the activeFormat, the point of interest and rectangle of interest will revert to their default values. Clients can observe automatic changes to the receiver's exposureRectOfInterest by key value observing this property. Note that setting exposureRectOfInterest alone does not initiate an exposure operation. After setting exposureRectOfInterest, call -setExposureMode: to apply the new rectangle of interest.
- */
+/// The device's current exposure rectangle of interest, if it has one.
+///
+/// The value of this property is a ``CGRect`` determining the device's exposure rectangle of interest. Use this as an alternative to setting ``exposurePointOfInterest``, as it allows you to specify both a location and size. For example, a value of `CGRectMake(0, 0, 1, 1)` tells the device to use the entire field of view when determining the exposure, while `CGRectMake(0, 0, 0.25, 0.25)` indicates the top left sixteenth, and `CGRectMake(0.75, 0.75, 0.25, 0.25)` indicates the bottom right sixteenth. Setting ``exposureRectOfInterest`` throws an `NSInvalidArgumentException` if ``exposureRectOfInterestSupported`` returns `false`. Setting ``exposureRectOfInterest`` throws an `NSInvalidArgumentException` if your provided rectangle's size is smaller than the ``minExposureRectOfInterestSize``. Setting ``exposureRectOfInterest`` throws an `NSGenericException` if you call it without first obtaining exclusive access to the device using ``AVCaptureDevice/lockForConfiguration:``. Setting ``exposureRectOfInterest`` updates the device's ``exposurePointOfInterest`` to the center of your provided rectangle of interest. If you later set the device's ``exposurePointOfInterest``, the ``exposureRectOfInterest`` resets to the default sized rectangle of interest for the new exposure point of interest. If you change your ``AVCaptureDevice/activeFormat``, the point of interest and rectangle of interest both revert to their default values. You can observe automatic changes to the device's ``exposureRectOfInterest`` by key-value observing this property.
+///
+/// - Note: Setting ``exposureRectOfInterest`` alone does not initiate an exposure operation. After setting ``exposureRectOfInterest``, set ``exposureMode`` to apply the new rectangle of interest.
 @property(nonatomic) CGRect exposureRectOfInterest API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @method defaultRectForExposurePointOfInterest:
- @abstract
-    Returns the default rectangle of interest that is used for a given exposure point of interest.
- 
- @param pointOfInterest
-    Point of interest for which we are returning the default rectangle of interest.
- 
- @discussion
-    Pass (0.5, 0.5) to get the exposure rectangle of interest used for the default exposure point of interest at (0.5, 0.5). This method returns CGRectNull if isExposureRectOfInterestSupported returns NO.
- */
+/// The default rectangle of interest used for a given exposure point of interest.
+///
+/// - Parameter pointOfInterest: The point of interest for which you want the default rectangle of interest.
+///
+/// For example, pass `(0.5, 0.5)` to get the exposure rectangle of interest used for the default exposure point of interest at `(0.5, 0.5)`.
+///
+/// This method returns `CGRectNull` if ``exposureRectOfInterestSupported`` returns `false`.
 - (CGRect)defaultRectForExposurePointOfInterest:(CGPoint)pointOfInterest API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /*!
@@ -1568,7 +1504,7 @@
  @param ISO
     The exposure ISO value, as described in the documentation for the ISO property. A value of AVCaptureISOCurrent can be used to indicate that the caller does not wish to specify a value for ISO.
  @param handler
-    A block to be called when both exposureDuration and ISO have been set to the values specified and exposureMode is set to AVCaptureExposureModeCustom. If setExposureModeCustomWithDuration:ISO:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
+    A block to be called when both exposureDuration and ISO have been set to the values specified and exposureMode is set to AVCaptureExposureModeCustom. If setExposureModeCustomWithDuration:ISO:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
  
  @discussion
     This is the only way of setting exposureDuration and ISO. This method throws an NSRangeException if either exposureDuration or ISO is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCapturePhotoOutput to capture photos, note that the photoQualityPrioritization property of AVCapturePhotoSettings defaults to AVCapturePhotoQualityPrioritizationBalanced, which allows photo capture to temporarily override the capture device's ISO and exposureDuration values if the scene is dark enough to warrant some form of multi-image fusion to improve quality. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set your AVCapturePhotoSettings.photoQualityPrioritization property to AVCapturePhotoQualityPrioritizationSpeed. The same rule applies if you use the deprecated AVCapturePhotoSettings.autoStillImageStabilizationEnabled property or AVCaptureStillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable property. You must set them to NO to preserve your custom or locked exposure settings.
@@ -1629,7 +1565,7 @@
  @param bias
     The bias to be applied to the exposure target value, as described in the documentation for the exposureTargetBias property.
  @param handler
-    A block to be called when exposureTargetBias has been set to the value specified. If setExposureTargetBias:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which the setting has been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
+    A block to be called when exposureTargetBias has been set to the value specified. If setExposureTargetBias:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which the setting has been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
  
  @discussion
     This is the only way of setting exposureTargetBias. This method throws an NSRangeException if exposureTargetBias is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
@@ -1814,7 +1750,7 @@
  @param whiteBalanceGains
     The white balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent can be used to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains.
  @param handler
-    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
+    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the `AVCaptureSession/synchronizationClock` prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
  
  @discussion
     Gain values are normalized to the minimum channel value to avoid brightness changes (e.g. R:2 G:2 B:4 will be normalized to R:1 G:1 B:2). For each channel in the whiteBalanceGains struct, only values between 1.0 and maxWhiteBalanceGain after nomalization are supported.  This method throws an NSRangeException if any of the whiteBalanceGains are set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
@@ -2702,31 +2638,20 @@
 @property(nonatomic, readonly) NSSet<AVSpatialCaptureDiscomfortReason> *spatialCaptureDiscomfortReasons API_AVAILABLE(macos(15.0), ios(18.0), macCatalyst(18.0), tvos(18.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 @end
 
-/*!
- @group AVCaptureSceneMonitoringStatus string constants
-
- @discussion
-    Some features have certain requirements on the scene (lighting condition for Cinematic Video, for example) to produce optimal results; these AVCaptureSceneMonitoringStatus string constants are used to represent such scene statuses for a given feature.
- */
+/// An informative status about the scene observed by the device.
+///
+/// Some features have certain requirements on the scene (lighting condition for Cinematic Video, for example) to produce optimal results; these ``AVCaptureSceneMonitoringStatus`` string constants are used to represent such scene statuses for a given feature.
 typedef NSString *AVCaptureSceneMonitoringStatus NS_TYPED_ENUM API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @constant AVCaptureSceneMonitoringStatusNotEnoughLight
-    The lighting of the current scene is not bright enough.
- */
+/// The light level of the current scene is insufficient for the current set of features to function optimally.
 AVF_EXPORT AVCaptureSceneMonitoringStatus const AVCaptureSceneMonitoringStatusNotEnoughLight API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVCaptureDevice (AVCaptureDeviceCinematicVideoCapture)
 
-/*!
- @property cinematicVideoCaptureSceneMonitoringStatuses
- @abstract
-    Indicates the current scene monitoring statuses related to Cinematic Video capture.
- 
- @discussion
-    This property can be monitored in order to determine the presentation of UI elements to inform the user that they should reframe their scene for a better Cinematic Video experience ("scene is too dark").
- */
+/// The current scene monitoring statuses related to Cinematic Video capture.
+///
+/// Monitor this property via key-value observation to present a UI informing the user that they should reframe their scene for a better Cinematic Video experience ("scene is too dark").
 @property(nonatomic, readonly) NSSet<AVCaptureSceneMonitoringStatus> *cinematicVideoCaptureSceneMonitoringStatuses;
 
 @end
@@ -3679,74 +3604,39 @@
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVCaptureDeviceFormat (AVCaptureDeviceFormatCinematicVideoSupport)
 
-/*!
- @property cinematicVideoCaptureSupported
- @abstract
-    Indicates whether the format supports Cinematic Video capture.
- 
- @discussion
-    This property returns YES if the format supports Cinematic Video that produces a controllable, simulated depth of field and adds beautiful focus transitions for a cinema-grade look.
- */
+/// Indicates whether the format supports Cinematic Video capture.
+///
+/// This property returns `true` if the format supports Cinematic Video that produces a controllable, simulated depth of field and adds beautiful focus transitions for a cinema-grade look.
 @property(nonatomic, readonly, getter=isCinematicVideoCaptureSupported) BOOL cinematicVideoCaptureSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property defaultSimulatedAperture
- @abstract
-    Default shallow depth of field simulated aperture.
- 
- @discussion
-    This will return a non-zero value on devices that support the shallow depth of field effect.
- */
+/// Default shallow depth of field simulated aperture.
+///
+/// This property returns a non-zero value on devices that support the shallow depth of field effect.
 @property(nonatomic, readonly) float defaultSimulatedAperture API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property minSimulatedAperture
- @abstract
-    Minimum supported shallow depth of field simulated aperture.
- 
- @discussion
-    On devices that do not support changing the simulated aperture value, this will return a value of 0.
- */
+/// Minimum supported shallow depth of field simulated aperture.
+///
+/// On devices that do not support changing the simulated aperture value, this returns a value of `0`.
 @property(nonatomic, readonly) float minSimulatedAperture API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property maxSimulatedAperture
- @abstract
-    Maximum supported shallow depth of field simulated aperture.
- 
- @discussion
-    On devices that do not support changing the simulated aperture value, this will return a value of 0.
- */
+/// Maximum supported shallow depth of field simulated aperture.
+///
+/// On devices that do not support changing the simulated aperture value, this returns a value of `0`.
 @property(nonatomic, readonly) float maxSimulatedAperture API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property videoMinZoomFactorForCinematicVideo
- @abstract
-    Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property when Cinematic Video capture is enabled on the device input.
- 
- @discussion
-    Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns 1.0.
- */
+/// Indicates the minimum zoom factor available for the ``AVCaptureDevice/videoZoomFactor`` property when Cinematic Video capture is enabled on the device input.
+///
+/// Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `1.0`.
 @property(nonatomic, readonly) CGFloat videoMinZoomFactorForCinematicVideo API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property videoMaxZoomFactorForCinematicVideo
- @abstract
-    Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property when Cinematic Video capture is enabled on the device input.
- 
- @discussion
-    Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns 1.0.
- */
+/// Indicates the maximum zoom factor available for the ``AVCaptureDevice/videoZoomFactor`` property when Cinematic Video capture is enabled on the device input.
+///
+/// Devices support a limited zoom range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `1.0`.
 @property(nonatomic, readonly) CGFloat videoMaxZoomFactorForCinematicVideo API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property videoFrameRateRangeForCinematicVideo
- @abstract
-    Indicates the minimum / maximum frame rates available when Cinematic Video capture is enabled on the device input.
- 
- @discussion
-    Devices may support a limited frame rate range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns nil.
- */
+/// Indicates the minimum / maximum frame rates available when Cinematic Video capture is enabled on the device input.
+///
+/// Devices may support a limited frame rate range when Cinematic Video capture is active. If this device format does not support Cinematic Video capture, this property returns `nil`.
 @property(nonatomic, readonly, nullable) AVFrameRateRange *videoFrameRateRangeForCinematicVideo API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
@@ -3754,85 +3644,51 @@
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVCaptureDeviceFormat (CameraLensSmudgeDetection)
 
-/*!
- @property cameraLensSmudgeDetectionSupported
- @abstract
-    A BOOL value specifying whether camera lens smudge detection is supported.
-
- @discussion
-    This property returns YES if the session's current configuration supports lens smudge detection. When switching cameras or formats this property may change. When this property changes from YES to NO, cameraLensSmudgeDetectionEnabled also reverts to NO. If you've previously opted in for lens smudge detection and then change configurations, you may need to set cameraLensSmudgeDetectionEnabled = YES again.
- */
+/// Whether camera lens smudge detection is supported.
+///
+/// This property returns `true` if the session's current configuration supports lens smudge detection. When switching cameras or formats, this property may change. When this property changes from `true` to `false`, ``AVCaptureDevice/cameraLensSmudgeDetectionEnabled`` also reverts to `false`. If you opt in for lens smudge detection and then change configurations, you should set ``AVCaptureDevice/cameraLensSmudgeDetectionEnabled`` to `true` again.
 @property(nonatomic, readonly, getter=isCameraLensSmudgeDetectionSupported) BOOL cameraLensSmudgeDetectionSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
 
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVCaptureDevice (CameraLensSmudgeDetection)
-/*!
- @method setCameraLensSmudgeDetectionEnabled:detectionInterval
- @abstract
-    Specify whether to enable camera lens smudge detection, and the interval time between each run of detections.
 
- @param cameraLensSmudgeDetectionEnabled
-    Specify whether camera lens smudge detection should be enabled.
- @param detectionInterval
-    The detection running interval if detection is enabled.
- @discussion
-    Each run of detection processes frames over a short period, and produces one detection result. Use `detectionInterval` to specify the interval time between each run of detections. For example, when `cameraLensSmudgeDetectionEnabled` is set to YES and `detectionInterval` is set to 1 minute, detection runs once per minute, and updates `AVCaptureCameraLensSmudgeDetectionStatus`. If `detectionInterval` is set to `kCMTimeInvalid`, detection will only run once after the session starts. If `detectionInterval` is set to `kCMTimeZero`, detection will run continuously.
-    
-    AVCaptureDevice throws an NSInvalidArgumentException if `cameraLensSmudgeDetectionSupported` property on the current active format returns NO. From disabled (or stopped) to enabling requires a lengthy reconfiguration of the capture render pipeline, so if you intend to enable this feature, you should enable this detection before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
- */
+/// Specify whether to enable camera lens smudge detection, and the interval time between each run of detections.
+///
+/// - Parameter cameraLensSmudgeDetectionEnabled: Specify whether camera lens smudge detection should be enabled.
+/// - Parameter detectionInterval: The detection running interval if detection is enabled.
+///
+/// Each run of detection processes frames over a short period, and produces one detection result. Use `detectionInterval` to specify the interval time between each run of detections. For example, when ``cameraLensSmudgeDetectionEnabled`` is set to `true` and `detectionInterval` is set to 1 minute, detection runs once per minute, and updates ``AVCaptureCameraLensSmudgeDetectionStatus``. If `detectionInterval` is set to ``kCMTimeInvalid``, detection runs only once after the session starts. If `detectionInterval` is set to ``kCMTimeZero``, detection runs continuously.
+///
+/// ``AVCaptureDevice`` throws an `NSInvalidArgumentException` if the ``AVCaptureDeviceFormat/cameraLensSmudgeDetectionSupported`` property on the current active format returns `false`. Enabling detection requires a lengthy reconfiguration of the capture render pipeline, so you should enable detection before calling ``AVCaptureSession/startRunning`` or within ``AVCaptureSession/beginConfiguration`` and ``AVCaptureSession/commitConfiguration`` while running.
 - (void)setCameraLensSmudgeDetectionEnabled:(BOOL)cameraLensSmudgeDetectionEnabled detectionInterval:(CMTime)detectionInterval API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property cameraLensSmudgeDetectionEnabled
- @abstract
-    The cameraLensSmudgeDetectionEnabled as set by -[AVCaptureDevice setCameraLensSmudgeDetectionEnabled:detectionInterval:].
- 
- @discussion
-    By default, this property is set to NO.
- */
+/// Whether camera lens smudge detection is enabled.
+///
+/// You enable lens smudge detection by calling ``setCameraLensSmudgeDetectionEnabled:detectionInterval:``. By default, this property returns `false`.
 @property(nonatomic, readonly, getter=isCameraLensSmudgeDetectionEnabled) BOOL cameraLensSmudgeDetectionEnabled API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property cameraLensSmudgeDetectionInterval
- @abstract
-    The cameraLensSmudgeDetectionInterval as set by -[AVCaptureDevice setCameraLensSmudgeDetectionEnabled:detectionInterval:].
- 
- @discussion
-    By default, this property is set to kCMTimeInvalid.
- */
+/// The camera lens smudge detection interval.
+///
+/// ``cameraLensSmudgeDetectionInterval`` is set by calling ``setCameraLensSmudgeDetectionEnabled:detectionInterval:``. By default, this property returns `kCMTimeInvalid`.
 @property(nonatomic, readonly) CMTime cameraLensSmudgeDetectionInterval API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @enum AVCaptureCameraLensSmudgeDetectionStatus
- @abstract
-    Constants indicating the current camera lens smudge detection status.
- 
- @constant AVCaptureCameraLensSmudgeDetectionStatusDisabled
-    Indicates that the detection is not enabled.
- @constant AVCaptureCameraLensSmudgeDetectionStatusSmudgeNotDetected
-    Indicates that the most recent detection identifies smudge is not detected on camera lens.
- @constant AVCaptureCameraLensSmudgeDetectionStatusSmudged
-    Indicates that the most recent detection identifies camera lens is smudged.
- @constant AVCaptureCameraLensSmudgeDetectionStatusUnknown
-    Indicates that the detection result hasn't settled, commonly caused by excessive camera movement or the content of image.
- */
+/// Constants indicating the current camera lens smudge detection status.
 typedef NS_ENUM(NSInteger, AVCaptureCameraLensSmudgeDetectionStatus) {
+    /// Indicates that the detection is not enabled.
     AVCaptureCameraLensSmudgeDetectionStatusDisabled            = 0,
+    /// Indicates that the most recent detection found no smudge on the camera lens.
     AVCaptureCameraLensSmudgeDetectionStatusSmudgeNotDetected   = 1,
+    /// Indicates that the most recent detection found the camera lens to be smudged.
     AVCaptureCameraLensSmudgeDetectionStatusSmudged             = 2,
+    /// Indicates that the detection result has not settled, commonly caused by excessive camera movement or the content of the scene.
     AVCaptureCameraLensSmudgeDetectionStatusUnknown             = 3,
 } API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property cameraLensSmudgeDetectionStatus
- @abstract
-    A value specifying the status of camera lens smudge detection.
-
- @discussion
-    During the initial detection execution, `cameraLensSmudgeDetectionStatus` is `AVCaptureCameraLensSmudgeDetectionStatusUnknown` before detection result is settled. Once a detection result is produced, `cameraLensSmudgeDetectionStatus` is the most recent detection result. This property can be key-value observed.
- */
+/// A value specifying the status of camera lens smudge detection.
+///
+/// During initial detection execution, ``cameraLensSmudgeDetectionStatus`` returns ``AVCaptureCameraLensSmudgeDetectionStatusUnknown`` until the detection result settles. Once a detection result is produced, ``cameraLensSmudgeDetectionStatus`` returns the most recent detection result. This property can be key-value observed.
 @property(nonatomic, readonly) AVCaptureCameraLensSmudgeDetectionStatus cameraLensSmudgeDetectionStatus API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	2025-07-27 03:33:46
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	2025-08-06 07:59:06
@@ -575,7 +575,6 @@
     Returns whether or not capturing spatial video to a file is supported. Note that in order to be supported, two conditions must be met. (1) The source AVCaptureDevice's activeFormat.spatialVideoCaptureSupported property must return YES. (2) The video AVCaptureConnection's activeVideoStabilizationMode property must return AVCaptureVideoStabilizationModeCinematic, AVCaptureVideoStabilizationModeCinematicExtended, or AVCaptureVideoStabilizationModeCinematicExtendedEnhanced.
  */
 @property(nonatomic, readonly, getter=isSpatialVideoCaptureSupported) BOOL spatialVideoCaptureSupported API_AVAILABLE(macos(15.0), ios(18.0), macCatalyst(18.0), tvos(18.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
-
 /*!
  @property spatialVideoCaptureEnabled
  @abstract
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2025-07-25 10:08:30
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2025-08-04 11:36:48
@@ -350,55 +350,39 @@
  */
 @property(nonatomic, getter=isWindNoiseRemovalEnabled) BOOL windNoiseRemovalEnabled API_AVAILABLE(macos(15.0), ios(18.0), macCatalyst(18.0), tvos(18.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property cinematicVideoCaptureSupported
- @abstract
-    A BOOL value specifying whether Cinematic Video capture is supported.
- 
- @discussion
-    With Cinematic Video capture, you get a simulated depth-of-field effect that keeps your subjects—people, pets, and more—in sharp focus while applying a pleasing blur to the background (or foreground). Depending on the focus mode (see `AVCaptureCinematicVideoFocusMode` for detail), the camera either uses machine learning to automatically detect and focus on subjects in the scene, or it fixes focus on a subject until it exits the scene. Cinematic Videos can be played back and edited using the Cinematic framework.
- 
-    The simulated aperture can be adjusted before the recording starts using the simulatedAperture property. Focus transitions can be dynamically controlled using the Cinematic Video specific focus methods on AVCaptureDevice.
-     
-    The resulted movie file can be played back and edited with the Cinematic framework.
- 
-    This property returns YES if the session's current configuration allows Cinematic Video capture. When switching cameras or formats this property may change. When this property changes from YES to NO, cinematicVideoCaptureEnabled also reverts to NO. If you've previously opted in for Cinematic Video capture and then change configurations, you may need to set cinematicVideoCaptureEnabled = YES again. This property is key-value observable.
- 
-    AVCaptureDepthDataOutput is not supported when cinematicVideoCaptureEnabled is set to true. Running an AVCaptureSession with both of these features throws an NSInvalidArgumentException.
- */
+/// A BOOL value specifying whether Cinematic Video capture is supported.
+///
+/// With Cinematic Video capture, you get a simulated depth-of-field effect that keeps your subjects (people, pets, and more) in sharp focus while applying a pleasing blur to the background (or foreground). Depending on the focus mode (see ``AVCaptureCinematicVideoFocusMode`` for detail), the camera either uses machine learning to automatically detect and focus on subjects in the scene, or it fixes focus on a subject until it exits the scene. Cinematic Videos can be played back and edited using the Cinematic framework.
+///
+/// You can adjust the video's simulated aperture before starting a recording using the ``simulatedAperture`` property. With Cinematic Video specific focus methods on ``AVCaptureDevice``, you can dynamically control focus transitions.
+///
+/// Movie files captured with Cinematic Video enabled can be played back and edited with the [Cinematic framework](https://developer.apple.com/documentation/cinematic/playing-and-editing-cinematic-mode-video?language=objc).
+///
+/// This property returns `true` if the session's current configuration allows Cinematic Video capture. When switching cameras or formats, this property may change. When this property changes from `true` to `false`, ``cinematicVideoCaptureEnabled`` also reverts to `false`. If you've previously opted in for Cinematic Video capture and then change configuration, you may need to set ``cinematicVideoCaptureEnabled`` to `true` again. This property is key-value observable.
+///
+/// - Note: ``AVCaptureDepthDataOutput`` is not supported when ``cinematicVideoCaptureEnabled`` is set to `true`. Running an ``AVCaptureSession`` with both of these features throws an `NSInvalidArgumentException`.
 @property(nonatomic, readonly, getter=isCinematicVideoCaptureSupported) BOOL cinematicVideoCaptureSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property cinematicVideoCaptureEnabled
- @abstract
-    A BOOL value specifying whether the Cinematic Video effect is applied to any AVCaptureMovieFileOutput, AVCaptureVideoDataOutput, AVCaptureMetadataOutput, and AVCaptureVideoPreviewLayer added to the same capture session.
- 
- @discussion
-    Default is NO. Set to YES to enable support for Cinematic Video capture.
-     
-    When set to YES, the corresponding AVCaptureDevice's focusMode will be updated to AVCaptureFocusModeContinuousAutoFocus. While this is property is YES any attempt to change the focus mode will result in an exception.
-     
-    This property may only be set to YES if cinematicVideoCaptureSupported is YES. Enabling Cinematic Video capture requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture Cinematic Video, you should set this property to YES before calling -[AVCaptureSession startRunning] or within -[AVCaptureSession beginConfiguration] and -[AVCaptureSession commitConfiguration] while running.
-
- */
+/// A BOOL value specifying whether the Cinematic Video effect is being applied to any movie file output, video data output, metadata output, or video preview layer added to the capture session.
+///
+/// Default is `false`. Set to `true` to enable support for Cinematic Video capture.
+///
+/// When you set this property to `true`, your input's associated ``AVCaptureDevice/focusMode`` changes to ``AVCaptureFocusModeContinuousAutoFocus``. While Cinematic Video capture is enabled, you are not permitted to change your device's focus mode, and any attempt to do so results in an `NSInvalidArgumentException`. You may only set this property to `true` if ``cinematicVideoCaptureSupported`` is `true`.
+///
+/// - Note: Enabling Cinematic Video capture requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture Cinematic Video, you should set this property to `true` before calling ``AVCaptureSession/startRunning`` or within ``AVCaptureSession/beginConfiguration`` and ``AVCaptureSession/commitConfiguration`` while running.
 @property(nonatomic, getter=isCinematicVideoCaptureEnabled) BOOL cinematicVideoCaptureEnabled API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property simulatedAperture
- @abstract
-    Shallow depth of field simulated aperture.
- 
- @discussion
-    When capturing a Cinematic Video, use this property to control the amount of blur in the simulated depth of field effect.
- 
-    This property only takes effect when cinematicVideoCaptureEnabled is set to YES.
- 
-    Setting this property throws an NSRangeException if simulatedAperture is set to a value less than the AVCaptureDevice's activeFormat.minSimulatedAperture or greater than the AVCaptureDevice's activeFormat.maxSimulatedAperture. This property may only be set if AVCaptureDevice's activeFormat.minSimulatedAperture returns a non-zero value, otherwise an NSInvalidArgumentException is thrown. This property can only be set before a Cinematic Video capture starts. An NSInvalidArgumentException is thrown if simulatedAperture is set when a Cinematic Video is being captured.
- 
-    This property is initialized to the device.activeFormat's defaultSimulatedAperture.
- 
-    This property is key-value observable.
- */
+/// Shallow depth of field simulated aperture.
+///
+/// When capturing a Cinematic Video, use this property to control the amount of blur in the simulated depth of field effect.
+///
+/// This property only takes effect when ``cinematicVideoCaptureEnabled`` is set to `true`.
+///
+/// - Important: Setting this property to a value less than the ``AVCaptureDevice/activeFormat/minSimulatedAperture`` or greater than the ``AVCaptureDevice/activeFormat/maxSimulatedAperture`` throws an `NSRangeException`. You may only set this property if ``AVCaptureDevice/activeFormat/minSimulatedAperture`` returns a non-zero value, otherwise an `NSInvalidArgumentException` is thrown. You must set this property before starting a Cinematic Video capture. If you attempt to set it while a recording is in progress, an `NSInvalidArgumentException` is thrown.
+///
+/// This property is initialized to the associated ``AVCaptureDevice/activeFormat/defaultSimulatedAperture``.
+///
+/// This property is key-value observable.
 @property(nonatomic) float simulatedAperture API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	2025-07-27 03:33:48
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	2025-08-04 10:25:01
@@ -109,14 +109,9 @@
  */
 @property(nonatomic) CGRect rectOfInterest API_AVAILABLE(ios(7.0), macCatalyst(14.0), tvos(17.0)) API_UNAVAILABLE(visionos);
 
-/*!
- @property requiredMetadataObjectTypesForCinematicVideoCapture
- @abstract
-    Indicates the required metadata object types when Cinematic Video capture is enabled.
- 
- @discussion
-    Since the Cinematic Video algorithm requires a particular set of metadata objects to function optimally, you must set your `metadataObjectTypes` property to this property's returned value if you've set `cinematicVideoCaptureEnabled` to YES on the connected device input.
- */
+/// The required metadata object types when Cinematic Video capture is enabled.
+///
+/// Since the Cinematic Video algorithm requires a particular set of metadata objects to function optimally, you must set your ``metadataObjectTypes`` property to this property's returned value if you've set ``AVCaptureDeviceInput/cinematicVideoCaptureEnabled`` to `true` on the connected device input, otherwise an `NSInvalidArgumentException` is thrown.
 @property(nonatomic, readonly) NSArray<AVMetadataObjectType> *requiredMetadataObjectTypesForCinematicVideoCapture API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	2025-07-27 03:33:12
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	2025-08-05 09:59:57
@@ -114,20 +114,20 @@
  */
 - (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates API_AVAILABLE(macos(10.15), ios(7.0), macCatalyst(14.0), tvos(17.0)) API_UNAVAILABLE(visionos);
 
-/// A Boolean value that indicates whether the output supports deferred start.
+/// A `BOOL` value that indicates whether the output supports deferred start.
 ///
 /// You can only set the ``deferredStartEnabled`` property value to `true` if the output supports deferred start.
 @property(nonatomic, readonly, getter=isDeferredStartSupported) BOOL deferredStartSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/// A Boolean value that Indicates whether to defer starting this capture output.
+/// A `BOOL` value that indicates whether to defer starting this capture output.
 ///
-/// When this value is `true`, the session doesn't prepare the output's resources until some time after ``startRunning`` returns. You can start the visual parts of your user interface (e.g. preview) prior to other parts (e.g. photo/movie capture, metadata output, etc..) to improve startup performance. Set this value to `false` for outputs that your app needs for startup, and `true` for the ones that it doesn't need to start immediately. For example, an ``AVCaptureVideoDataOutput`` that you intend to use for displaying preview should set this value to `false`, so that the frames are available as soon as possible.
+/// When this value is `true`, the session does not prepare the output's resources until some time after ``AVCaptureSession/startRunning`` returns. You can start the visual parts of your user interface (e.g. preview) prior to other parts (e.g. photo/movie capture, metadata output, etc..) to improve startup performance. Set this value to `false` for outputs that your app needs for startup, and `true` for the ones it does not need to start immediately. For example, an ``AVCaptureVideoDataOutput`` that you intend to use for displaying preview should set this value to `false`, so that the frames are available as soon as possible.
 ///
-/// By default, for apps that are linked on or after iOS 19, this property value is `true` for ``AVCapturePhotoOutput`` and ``AVCaptureFileOutput`` subclasses if supported, and `false` otherwise. When set to `true` for ``AVCapturePhotoOutput``, if you want to support multiple capture requests before running deferred start, set ``responsiveCaptureEnabled`` to `true` on that output.
+/// By default, for apps that are linked on or after iOS 26, this property value is `true` for ``AVCapturePhotoOutput`` and ``AVCaptureFileOutput`` subclasses if supported, and `false` otherwise. When set to `true` for ``AVCapturePhotoOutput``, if you want to support multiple capture requests before running deferred start, set ``AVCapturePhotoOutput/responsiveCaptureEnabled`` to `true` on that output.
 ///
-/// If ``deferredStartSupported`` is `false`, setting this property value to `true` results in the system throwing an invalid argument exception.
+/// If ``deferredStartSupported`` is `false`, setting this property value to `true` results in the system throwing an `NSInvalidArgumentException`.
 ///
-/// - Note: Set this value before committing the configuration.
+/// - Note: Set this value before calling ``AVCaptureSession/commitConfiguration`` as it requires a lengthy reconfiguration of the capture render pipeline.
 @property(nonatomic, getter=isDeferredStartEnabled) BOOL deferredStartEnabled API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2025-07-27 03:40:23
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2025-08-13 08:04:07
@@ -794,6 +794,7 @@
  */
 @property(nonatomic, readonly, getter=isShutterSoundSuppressionSupported) BOOL shutterSoundSuppressionSupported API_AVAILABLE(macos(15.0), ios(18.0), macCatalyst(18.0), tvos(18.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
+
 @end
 
 
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h	2025-07-27 03:23:10
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h	2025-08-06 08:26:24
@@ -68,30 +68,19 @@
 AVF_EXPORT NSNotificationName const AVCaptureSessionWasInterruptedNotification NS_SWIFT_NAME(AVCaptureSession.wasInterruptedNotification) API_AVAILABLE(macos(10.14), ios(4.0), macCatalyst(14.0), tvos(17.0), visionos(1.0)) API_UNAVAILABLE(watchos);
 
 
-/*!
- @enum AVCaptureSessionInterruptionReason
- @abstract
-    Constants indicating interruption reason. One of these is returned with the AVCaptureSessionWasInterruptedNotification (see AVCaptureSessionInterruptionReasonKey).
- 
- @constant AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground
-    An interruption caused by the app being sent to the background while using a camera. Camera usage is prohibited while in the background. Beginning in iOS 9.0, AVCaptureSession no longer produces an AVCaptureSessionRuntimeErrorNotification if you attempt to start running a camera while in the background. Instead, it sends an AVCaptureSessionWasInterruptedNotification with AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground. Provided you don't explicitly call [session stopRunning], your -startRunning request is preserved, and when your app comes back to foreground, you receive AVCaptureSessionInterruptionEndedNotification and your session starts running.
- @constant AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient
-    An interruption caused by the audio hardware temporarily being made unavailable, for instance, for a phone call, or alarm.
- @constant AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient
-    An interruption caused by the video device temporarily being made unavailable, for instance, when stolen away by another AVCaptureSession.
- @constant AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps
-    An interruption caused when the app is running in a multi-app layout, causing resource contention and degraded recording quality of service. Given your present AVCaptureSession configuration, the session may only be run if your app occupies the full screen.
- @constant AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableDueToSystemPressure
-    An interruption caused by the video device temporarily being made unavailable due to system pressure, such as thermal duress. See AVCaptureDevice's AVCaptureSystemPressure category for more information.
- @constant AVCaptureSessionInterruptionReasonSensitiveContentMitigationActivated
-    An interruption caused by a SCVideoStreamAnalyzer when it detects sensitive content on an associated AVCaptureDeviceInput.  To resume your capture session, call your analyzer's `continueStream` method.
- */
+/// Constants indicating interruption reason. One of these is returned with the ``AVCaptureSessionWasInterruptedNotification`` (see ``AVCaptureSessionInterruptionReasonKey``).
 typedef NS_ENUM(NSInteger, AVCaptureSessionInterruptionReason) {
+    /// An interruption caused by the app being sent to the background while using a camera. Camera usage is prohibited while in the background. Beginning in iOS 9.0, ``AVCaptureSession`` no longer produces an ``AVCaptureSessionRuntimeErrorNotification`` if you attempt to start running a camera while in the background. Instead, it sends an ``AVCaptureSessionWasInterruptedNotification`` with ``AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground``. Provided you don't explicitly call ``AVCaptureSession/stopRunning``, your ``AVCaptureSession/startRunning`` request is preserved, and when your app comes back to foreground, you receive ``AVCaptureSessionInterruptionEndedNotification`` and your session starts running.
     AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground               = 1,
+    /// An interruption caused by the audio hardware temporarily being made unavailable, for instance, for a phone call, or alarm.
     AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient                   = 2,
+    /// An interruption caused by the video device temporarily being made unavailable, for instance, when stolen away by another ``AVCaptureSession``.
     AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient                   = 3,
+    /// An interruption caused when the app is running in a multi-app layout, causing resource contention and degraded recording quality of service. Given your present ``AVCaptureSession`` configuration, the session may only be run if your app occupies the full screen.
     AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps = 4,
+    /// An interruption caused by the video device temporarily being made unavailable due to system pressure, such as thermal duress. See ``AVCaptureDevice/systemPressureState`` for more information.
     AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableDueToSystemPressure API_AVAILABLE(ios(11.1), macCatalyst(14.0), visionos(1.0)) = 5,
+    /// An interruption caused by a ``SCVideoStreamAnalyzer`` when it detects sensitive content on an associated ``AVCaptureDeviceInput``.  To resume your capture session, call your analyzer's ``SCVideoStreamAnalyzer/continueStream`` method.
     AVCaptureSessionInterruptionReasonSensitiveContentMitigationActivated API_AVAILABLE(ios(26.0), macCatalyst(26.0), tvos(26.0), visionos(26.0)) API_UNAVAILABLE(macos) = 6,
 } API_AVAILABLE(ios(9.0), macCatalyst(14.0), tvos(17.0), visionos(1.0)) API_UNAVAILABLE(macos) API_UNAVAILABLE(watchos);
 
@@ -595,14 +584,9 @@
  */
 @property(nonatomic) BOOL configuresApplicationAudioSessionToMixWithOthers API_AVAILABLE(ios(18.0), macCatalyst(18.0), tvos(18.0)) API_UNAVAILABLE(macos, visionos);
 
-/*!
- @property configuresApplicationAudioSessionForBluetoothHighQualityRecording
- @abstract
-    Indicates whether the receiver should configure the application's audio session for bluetooth high quality recording.
- 
- @discussion
-    The value of this property is a BOOL indicating whether the receiver should configure the application's audio session for bluetooth high quality recording (AirPods as a high quality microphone). When this property is set to YES, the AVCaptureSession will opt in for high quality bluetooth recording, allowing a user to select AirPods as the active mic source for capture. This property has no effect when usesApplicationAudioSession is set to NO. The default value is NO.
- */
+/// Indicates whether the receiver should configure the application's audio session for bluetooth high quality recording.
+///
+/// The value of this property is a `BOOL` indicating whether the receiver should configure the application's audio session for bluetooth high quality recording (AirPods as a high quality microphone). When this property is set to `true`, the ``AVCaptureSession`` will opt in for high quality bluetooth recording, allowing users of your app to select AirPods as the active mic source for capture. This property has no effect when ``usesApplicationAudioSession`` is set to `false`. The default value is `false`.
 @property(nonatomic) BOOL configuresApplicationAudioSessionForBluetoothHighQualityRecording API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(macos, macCatalyst, tvos, visionos);
 /*!
  @property automaticallyConfiguresCaptureDeviceForWideColor
@@ -682,56 +666,56 @@
  */
 @property(nonatomic, readonly) float hardwareCost API_AVAILABLE(ios(16.0), macCatalyst(16.0), tvos(17.0)) API_UNAVAILABLE(macos, visionos) API_UNAVAILABLE(watchos);
 
-/// A Boolean value that indicates whether the session supports manually running deferred start.
+/// A `BOOL` value that indicates whether the session supports manually running deferred start.
 ///
 /// Deferred Start is a feature that allows you to control, on a per-output basis, whether output objects start when or after the session is started. The session defers starting an output when its ``deferredStartEnabled`` property is set to `true`, and starts it after the session is started.
 ///
 /// You can only set the ``automaticallyRunsDeferredStart`` property value to `false` if the session supports manual deferred start.
 @property(nonatomic, readonly, getter=isManualDeferredStartSupported) BOOL manualDeferredStartSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/// A Boolean value that indicates whether deferred start runs automatically.
+/// A `BOOL` value that indicates whether deferred start runs automatically.
 ///
-/// Deferred Start is a feature that allows you to control, on a per-output basis, whether output objects start when or after the session is started. The session defers starting an output when its ``deferredStartEnabled`` property is set to `true`, and starts it after the session is started.
+/// Deferred Start is a feature that allows you to control, on a per-output basis, whether output objects start when or after the session is started. The session defers starting an output when its ``AVCaptureOutput/deferredStartEnabled`` property is set to `true`, and starts it after the session is started.
 ///
-/// When this value is `true`, ``AVCaptureSession`` automatically runs deferred start. If only ``AVCaptureVideoPreviewLayer`` objects have ``deferredStartEnabled`` set to `false`, the session runs deferred start a short time after displaying the first frame. If there are ``AVCaptureOutput`` objects that have ``deferredStartEnabled`` set to `false`, then the session waits until each output that provides streaming data to your app sends its first frame.
+/// When this value is `true`, ``AVCaptureSession`` automatically runs deferred start. If only ``AVCaptureVideoPreviewLayer`` objects have ``AVCaptureVideoPreviewLayer/deferredStartEnabled`` set to `false`, the session runs deferred start a short time after displaying the first frame. If there are ``AVCaptureOutput`` objects that have ``AVCaptureOutput/deferredStartEnabled`` set to `false`, then the session waits until each output that provides streaming data to your app sends its first frame.
 ///
 /// If you set this value to `false`, call ``runDeferredStartWhenNeeded`` to indicate when to run deferred start.
 ///
-/// By default, for apps that are linked on or after iOS 19, this value is `true`.
+/// By default, for apps that are linked on or after iOS 26, this value is `true`.
 ///
-/// If ``manualDeferredStartSupported`` is `false`, setting this property value to `false` results in the session throwing an invalid argument exception.
+/// - Note: If ``manualDeferredStartSupported`` is `false`, setting this property value to `false` results in the session throwing an `NSInvalidArgumentException`.
 ///
 /// - Note: Set this value before committing the configuration.
 @property(nonatomic) BOOL automaticallyRunsDeferredStart API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /// Tells the session to run deferred start when appropriate.
 ///
-/// You can only call this when automaticallyRunsDeferredStart is `false`. Otherwise, the session throws an invalid argument exception.
+/// For best perceived startup performance, call this after displaying the first frame, so that deferred start processing doesn't interfere with other initialization operations. For example, if using a <doc://com.apple.documentation/documentation/quartzcore/cametallayer> to draw camera frames, add a `presentHandler` (using <doc://com.apple.documentation/metal/mtldrawable/addpresentedhandler>) to the first drawable and call ``runDeferredStartWhenNeeded`` from there.
 ///
-/// For best perceived startup performance, call this after displaying the first frame, so that deferred start processing doesn't interfere with other initialization operations. For example, if using a <doc://com.apple.documentation/documentation/quartzcore/cametallayer> to draw camera frames, add a presentHandler (using <doc://com.apple.documentation/metal/mtldrawable/addpresentedhandler>) to the first drawable and call ``runDeferredStartWhenNeeded`` from there.
-///
 /// If one or more outputs need to start to perform a capture operation, and ``runDeferredStartWhenNeeded`` has not run yet, the session runs the deferred start on your app's behalf. Only call this method once for each configuration commit - after the first call, subsequent calls to ``runDeferredStartWhenNeeded`` have no effect. The deferred start runs asynchronously, so this method returns immediately.
 ///
-/// Important: -To avoid blocking your app's UI, don't call this method from the application's main actor or queue.
+/// - Note: You can only call this when ``automaticallyRunsDeferredStart`` is `false`. Otherwise, the session throws an `NSInvalidArgumentException`.
+///
+/// - Important: To avoid blocking your app's UI, don't call this method from the application's main actor or queue.
 - (void)runDeferredStartWhenNeeded API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /// A delegate object that observes events about deferred start.
 ///
-/// Call the ``setDeferredStartDelegate:queue:`` method to set the deferred start delegate for a session.
+/// Call the ``setDeferredStartDelegate:deferredStartDelegateCallbackQueue:`` method to set the deferred start delegate for a session.
 @property(nonatomic, readonly, nullable) id<AVCaptureSessionDeferredStartDelegate> deferredStartDelegate API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /// The dispatch queue on which the session calls deferred start delegate methods.
 ///
-/// Call the ``setDeferredStartDelegate:queue:`` method to specify the dispatch queue on which to call the deferred start delegate methods.
+/// Call the ``setDeferredStartDelegate:deferredStartDelegateCallbackQueue:`` method to specify the dispatch queue on which to call the deferred start delegate methods.
 @property(nonatomic, readonly, nullable) dispatch_queue_t deferredStartDelegateCallbackQueue API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 /// Sets a delegate object for the session to call when performing deferred start.
 ///
-/// This delegate receives a call to the ``sessionWillRunDeferredStart`` method when deferred start is about to run. It is non-blocking, so it's also possible that by the time this method is called, the deferred start is already underway. If you want your app to perform initialization (potentially) concurrently with deferred start (e.g. user-facing camera features that are not needed to display the first preview frame, but are available to the user as soon as possible) it may be done in the delegate's ``sessionWillRunDeferredStart`` method. To wait until deferred start is finished to perform some remaining initialization work, use the ``sessionDidRunDeferredStart`` method instead.
+/// This delegate receives a call to the ``AVCaptureSessionDeferredStartDelegate/sessionWillRunDeferredStart:`` method when deferred start is about to run. It is non-blocking, so by the time this method is called, the deferred start may already be underway. If you want your app to perform initialization (potentially) concurrently with deferred start (e.g. user-facing camera features that are not needed to display the first preview frame, but are available to the user as soon as possible) it may be done in the delegate's ``AVCaptureSessionDeferredStartDelegate/sessionWillRunDeferredStart:`` method. To wait until deferred start is finished to perform some remaining initialization work, use the ``AVCaptureSessionDeferredStartDelegate/sessionDidRunDeferredStart:`` method instead.
 ///
-/// The delegate receives a call to the ``sessionDidRunDeferredStart`` method when the deferred start finishes running. This allows you to run less-critical application initialization code. For example, if you've deferred an ``AVCapturePhotoOutput`` by setting its ``deferredStartEnabled`` property to `true`, and you'd like to do some app-specific initialization related to still capture, here might be a good place to put it.
+/// The delegate receives a call to the ``AVCaptureSessionDeferredStartDelegate/sessionDidRunDeferredStart:`` method when the deferred start finishes running. This allows you to run less-critical application initialization code. For example, if you've deferred an ``AVCapturePhotoOutput`` by setting its ``AVCaptureOutput/deferredStartEnabled`` property to `true`, and you'd like to do some app-specific initialization related to still capture, here might be a good place to put it.
 ///
-/// If the delegate is non-nil, the session still calls the ``sessionWillRunDeferredStart`` and ``sessionDidRunDeferredStart`` methods regardless of the value of the session's ``automaticallyRunsDeferredStart`` property.
+/// If the delegate is non-nil, the session still calls the ``AVCaptureSessionDeferredStartDelegate/sessionWillRunDeferredStart:`` and ``AVCaptureSessionDeferredStartDelegate/sessionDidRunDeferredStart:`` methods regardless of the value of the session's ``automaticallyRunsDeferredStart`` property.
 ///
 /// To minimize the capture session's startup latency, defer all unnecessary work until after the session starts. This delegate provides callbacks for you to schedule deferred work without impacting session startup performance.
 ///
@@ -739,10 +723,9 @@
 ///
 /// If ``deferredStartDelegate`` is not `NULL`, the session throws an exception if ``deferredStartDelegateCallbackQueue`` is `nil`.
 ///
-/// - Parameters:
-///   - deferredStartDelegate: An object conforming to the 'AVCaptureSessionDeferredStartDelegate' protocol that receives events about deferred start.
-///   - deferredStartDelegateCallbackQueue: A dispatch queue on which deferredStart delegate methods are called.
-- (void)setDeferredStartDelegate:(nullable id<AVCaptureSessionDeferredStartDelegate>) deferredStartDelegate deferredStartDelegateCallbackQueue:(nullable dispatch_queue_t)deferredStartDelegateCallbackQueue API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
+/// - Parameter deferredStartDelegate: An object conforming to the ``AVCaptureSessionDeferredStartDelegate`` protocol that receives events about deferred start.
+/// - Parameter deferredStartDelegateCallbackQueue: A dispatch queue on which deferredStart delegate methods are called.
+- (void)setDeferredStartDelegate:(nullable id<AVCaptureSessionDeferredStartDelegate>)deferredStartDelegate deferredStartDelegateCallbackQueue:(nullable dispatch_queue_t)deferredStartDelegateCallbackQueue API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 @end
 
 
@@ -810,21 +793,20 @@
 
 @end
 
-/// Defines an interface for delegates of `AVCaptureSession` to receive events about the session's deferred start.
+/// Defines an interface for delegates of the capture session to receive events about the session's deferred start.
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @protocol AVCaptureSessionDeferredStartDelegate <NSObject>
 
 /// This method gets called by the session when deferred start is about to run.
 ///
-/// Delegates receive this message when the session has finished the deferred start. This message will be sent regardless of whether the session's automaticallyRunsDeferredStart property is set. See ``setDeferredStartDelegate:queue:`` documentation for more information.
+/// Delegates receive this message when the session is about to run the deferred start. This message will be sent regardless of whether the session's ``AVCaptureSession/automaticallyRunsDeferredStart`` property is set. See ``AVCaptureSession/setDeferredStartDelegate:deferredStartDelegateCallbackQueue:`` documentation for more information.
 ///
-/// - Parameters:
-///   - session: The `AVCaptureSession` instance that runs the deferred start.
+/// - Parameter session: The ``AVCaptureSession`` instance that runs the deferred start.
 - (void)sessionWillRunDeferredStart:(AVCaptureSession *)session;
 
 /// This method gets called by the session when deferred start has finished running.
-/// - Parameters:
-///   - session: The `AVCaptureSession` instance that runs the deferred start.
+///
+/// - Parameter session: The ``AVCaptureSession`` instance that runs the deferred start.
 - (void)sessionDidRunDeferredStart:(AVCaptureSession *)session;
 
 @end
@@ -843,7 +825,7 @@
  
     AVCaptureMultiCamSession supports dynamic enabling and disabling of individual camera inputs without interrupting preview. In order to stop an individual camera input, set the enabled property on all of its connections or connected ports to NO. When the last active connection or port is disabled, the source camera stops streaming to save power and bandwidth. Other inputs streaming data through the session are unaffected.
  
-    Prior to iOS 19, AVCaptureMultiCamSession requires all input devices to have an activeFormat where multiCamSupported returns YES. In applications linked on or after iOS 19, this requirement is not enforced when only a single input device is used.
+    Prior to iOS 26, AVCaptureMultiCamSession requires all input devices to have an activeFormat where multiCamSupported returns YES. In applications linked on or after iOS 26, this requirement is not enforced when only a single input device is used.
  */
 API_AVAILABLE(ios(13.0), macCatalyst(14.0), tvos(17.0), visionos(2.1)) API_UNAVAILABLE(macos) API_UNAVAILABLE(watchos)
 @interface AVCaptureMultiCamSession : AVCaptureSession
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSpatialAudioMetadataSampleGenerator.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSpatialAudioMetadataSampleGenerator.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSpatialAudioMetadataSampleGenerator.h	2025-07-27 03:40:23
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSpatialAudioMetadataSampleGenerator.h	2025-08-06 08:26:25
@@ -13,61 +13,35 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-/*!
- @class AVCaptureSpatialAudioMetadataSampleGenerator
- @abstract
-	Defines an interface for generating a spatial audio timed metadata sample.
- */
-
+/// An interface for generating a spatial audio timed metadata sample.
 API_AVAILABLE(ios(26.0)) API_UNAVAILABLE(macos, macCatalyst, tvos, visionos) API_UNAVAILABLE(watchos)
 @interface AVCaptureSpatialAudioMetadataSampleGenerator : NSObject
 
-/*!
- @property timedMetadataSampleBufferFormatDescription
- @abstract
-    Returns the CMFormatDescription that will be specified by the buffer returned from the createTimedMetadataSampleBuffer method.
- 
- @discussion
-    Clients can use this format description when creating their AVAssetWriter track that will contain the metadata.
-*/
+/// Returns the format description of the sample buffer returned from the ``newTimedMetadataSampleBufferAndResetAnalyzer`` method.
+///
+/// Use this format description when creating your ``AVAssetWriter`` track for spatial audio timed metadata.
 @property(readonly, nonatomic) CMFormatDescriptionRef timedMetadataSampleBufferFormatDescription;
 
-/*!
- @method analyzeAudioSample:
- @abstract
-    Analyzes the audio sample buffer for its contribution to the spatial audio timed metadata value.
- 
- @param sbuf
-	An CMSampleBuffer containing spatial audio.
- @result
-	Returns noErr if the sample was able to be analyzed.
- 
- @discussion
-    All of the spatial audio sample buffer that given to an AVAssetWriter need to be analyzed for the generation of the proper spatial audio timed metadata value.
- */
+/// Analyzes the provided audio sample buffer for its contribution to the spatial audio timed metadata value.
+///
+/// - Parameter sbuf: A sample buffer containing spatial audio.
+/// - Returns: `noErr` if the sample is successfully analyzed, otherwise a non-zero error code.
+///
+/// You must call this method with each and every spatial audio buffer you provide to ``AVAssetWriter``, so it can be analyzed for the generation of a proper spatial audio timed metadata value.
 - (OSStatus)analyzeAudioSample:(CMSampleBufferRef)sbuf;
 
-/*!
- @method newTimedMetadataSampleBufferAndResetAnalyzer
- @abstract
-    Returns a CMSampleBuffer containing a spatial audio timed metadata sample containing the value computed from all of the prior audio sample buffers passed to analyzeAudioSample:. The analyzer is also reset to its initial state, making it ready for a new run of sample buffers.
- 
- @result
-	Returns an CMSampleBuffer that contains the spatial audio timed metadata sample. If no value can be computed, NULL will be returned.
- 
- @discussion
-    This method is to be called after the last audio sample buffer has been passed to the client's AVAssetWriterInput for audio. The returned CMSampleBuffer can be passed directly to the client's AVAssetWriterInput for the audio timed metadata track. Note that it is expected that one and only one sample buffer be present in the timed metadata track.
- */
+/// Creates a sample buffer containing a spatial audio timed metadata sample computed from all analyzed audio buffers, and resets the analyzer to its initial state.
+///
+/// - Returns: A ``CMSampleBufferRef`` containing the spatial audio timed metadata sample, or `NULL` if no value can be computed.
+///
+/// Call this method after you pass the last audio sample buffer of your recording to ``analyzeAudioSample:``. Then pass the returned ``CMSampleBufferRef`` directly to your ``AVAssetWriterInput`` to add the sample to your recording's audio timed metadata track. Note that ``AVAssetWriter`` expects one and only one spatial audio metadata sample buffer to be present in the timed metadata track.
+///
+/// - Note: Calling this method also resets the analyzer, making it ready for another run of audio sample buffers. Thus one generator can be re-used for multiple recordings.
 - (nullable CMSampleBufferRef)newTimedMetadataSampleBufferAndResetAnalyzer;
 
-/*!
- @method resetAnalyzer
- @abstract
-    Calling this method will reset the analyzer to its initial state so that a new run of audio sample buffers can be analyzed.
- 
- @discussion
-    If the client needs to abort generating the audio timed metadata buffer for audio buffers already given to analyzeAudioSample:, calling this method is required to prepare the analyzer for a new run of sample buffers.
- */
+/// Calling this method resets the analyzer to its initial state so that a new run of audio sample buffers can be analyzed.
+///
+/// Call this method if you need to abort generating the audio timed metadata buffer for audio already provided to ``analyzeAudioSample:``.
 - (void)resetAnalyzer;
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h	2025-07-25 10:08:30
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h	2025-08-04 10:25:00
@@ -108,6 +108,7 @@
  */
 @property(nonatomic, getter=isHighResolutionStillImageOutputEnabled) BOOL highResolutionStillImageOutputEnabled API_AVAILABLE(macos(10.14), ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(visionos);
 
+
 /*!
  @property capturingStillImage
  @abstract
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2025-07-27 03:47:36
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2025-08-13 07:59:05
@@ -184,16 +184,11 @@
 - (nullable NSDictionary<NSString *, id> *)recommendedVideoSettingsForVideoCodecType:(AVVideoCodecType)videoCodecType assetWriterOutputFileType:(AVFileType)outputFileType outputFileURL:(nullable NSURL *)outputFileURL API_AVAILABLE(macos(14.0), ios(17.0), macCatalyst(17.0), tvos(17.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 
-/*!
- @property recommendedMediaTimeScaleForAssetWriter
- @abstract
-    Indicates the recommended media timescale for the video track.
+/// Indicates the recommended media timescale for the video track.
+///
+/// - Returns: The recommended media timescale based on the active capture session's inputs. It is never less than 600. It may or may not be a multiple of 600.
+@property(nonatomic, readonly) CMTimeScale recommendedMediaTimeScaleForAssetWriter API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
- @discussion
-    This will return a recommended media timescale based on the active capture session's inputs. It will not be less than 600. It may or may not be a multiple of 600.
- */
-@property(nonatomic, readonly) CMTimeScale recommendedMediaTimeScaleForAssetWriter API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(16.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
-
 /*!
  @property availableVideoCVPixelFormatTypes
  @abstract
@@ -254,26 +249,16 @@
  */
 @property(nonatomic) BOOL deliversPreviewSizedOutputBuffers API_AVAILABLE(ios(13.0), macCatalyst(14.0), tvos(17.0)) API_UNAVAILABLE(macos, visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property preparesCellularRadioForNetworkConnection
- @abstract
-    Indicates whether the receiver should prepare the cellular radio for imminent network activity.
- 
- @discussion
-    Apps that scan video data output buffers for information that will result in network activity (such as detecting a QRCode containing a URL) should set this property true to allow the cellular radio to prepare for an imminent network request. Enabling this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to YES before calling -[AVCaptureSession startRunning].
-     
-    Using this API requires your app to adopt the entitlement `com.apple.developer.avfoundation.video-data-output-prepares-cellular-radio-for-machine-readable-code-scanning`.
- */
+/// Indicates whether the receiver should prepare the cellular radio for imminent network activity.
+///
+/// Apps that scan video data output buffers for information that will result in network activity (such as detecting a QRCode containing a URL) should set this property `true` to allow the cellular radio to prepare for an imminent network request. Enabling this property requires a lengthy reconfiguration of the capture render pipeline, so you should set this property to `true` before calling ``AVCaptureSession/startRunning``.
+///
+/// Using this API requires your app to adopt the entitlement `com.apple.developer.avfoundation.video-data-output-prepares-cellular-radio-for-machine-readable-code-scanning`.
 @property BOOL preparesCellularRadioForNetworkConnection API_AVAILABLE(ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(macos, visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property preservesDynamicHDRMetadata
- @abstract
-    Indicates whether the receiver should preserve dynamic HDR metadata as an attachment on the output sample buffer's underlying CVPixelBufferRef.
- 
- @discussion
-    Set this property to YES if you wish to use AVCaptureVideoDataOutput with AVAssetWriter to record HDR movies. You must also set `kVTCompressionPropertyKey_PreserveDynamicHDRMetadata` to `true` in the compression settings you pass to your `AVAssetWriterInput`. These compression settings are represented under the `AVVideoCompressionPropertiesKey` sub-dictionary of your top-level AVVideoSettings. When this key is set to true, performance improves, as the encoder is able to skip HDR metadata calculation for every frame. The default value is NO.
- */
+/// Indicates whether the receiver should preserve dynamic HDR metadata as an attachment on the output sample buffer's underlying pixel buffer.
+///
+/// Set this property to `true` if you wish to use ``AVCaptureVideoDataOutput`` with ``AVAssetWriter`` to record HDR movies. You must also set ``kVTCompressionPropertyKey_PreserveDynamicHDRMetadata`` to `true` in the compression settings you pass to your ``AVAssetWriterInput``. These compression settings are represented under the ``AVVideoCompressionPropertiesKey`` sub-dictionary of your top-level AVVideoSettings (see <doc://com.apple.documentation/documentation/avfoundation/video-settings>). When you set this key to `true`, performance improves, as the encoder is able to skip HDR metadata calculation for every frame. The default value is `false`.
 @property BOOL preservesDynamicHDRMetadata API_AVAILABLE(ios(26.0), macCatalyst(26.0), tvos(26.0), macos(26.0), visionos(26.0)) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h	2025-07-27 03:40:20
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h	2025-08-04 11:36:49
@@ -259,22 +259,22 @@
  */
 @property(nonatomic, getter=isMirrored) BOOL mirrored API_DEPRECATED("Use AVCaptureConnection's videoMirrored instead.", ios(4.0, 6.0)) API_UNAVAILABLE(macos, visionos) API_UNAVAILABLE(tvos);
 
-/// A Boolean value that indicates whether the preview layer supports deferred start.
+/// A `BOOL` value that indicates whether the preview layer supports deferred start.
 ///
 /// You can only set the ``deferredStartEnabled`` property to `true` if the preview layer supports deferred start.
 @property(nonatomic, readonly, getter=isDeferredStartSupported) BOOL deferredStartSupported API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/// A Boolean value that indicates whether to defer starting this preview layer.
+/// A `BOOL` value that indicates whether to defer starting this preview layer.
 ///
-/// When this value is `true`, the session doesn't prepare the output's resources until some time after ``startRunning`` returns. You can start the visual parts of your user interface (e.g. preview) prior to other parts (e.g. photo/movie capture, metadata output, etc..) to improve startup performance. Set this value to `false` for outputs that your app needs for startup, and `true` for the ones that it doesn't need immediately.
+/// When this value is `true`, the session does not prepare the output's resources until some time after ``AVCaptureSession/startRunning`` returns. You can start the visual parts of your user interface (e.g. preview) prior to other parts (e.g. photo/movie capture, metadata output, etc..) to improve startup performance. Set this value to `false` if your app needs video preview immediately for startup, and `true` if it does not.
 ///
 /// By default, this value is `false` for ``AVCaptureVideoPreviewLayer`` objects, since this object is used to display preview. For best session start performance, set ``deferredStartEnabled`` to `false` for preview layers. If your app contains multiple preview layers, you may want to display the main preview layer as soon as possible and allow the remaining layers to display subsequently. In this case, set ``deferredStartEnabled`` to `true` for the remaining layers.
 ///
-/// Setting this property to the same value for all outputs, including ``AVCaptureVideoPreviewLayer`` and ``AVCaptureOutput``, is equivalent to not using deferred start.
+/// - Note: Setting this property to the same value for all outputs, including ``AVCaptureVideoPreviewLayer`` and ``AVCaptureOutput``, is equivalent to not using deferred start.
 ///
-/// If ``deferredStartSupported`` is `false`, setting this property value to `true` results in the session throwing an invalid argument exception.
+/// If ``deferredStartSupported`` is `false`, setting this property value to `true` results in the session throwing an `NSInvalidArgumentException`.
 ///
-/// - Note: Set this value before committing the configuration.
+/// - Note: Set this value before calling ``AVCaptureSession/commitConfiguration`` as it requires a lengthy reconfiguration of the capture render pipeline.
 @property(nonatomic, getter=isDeferredStartEnabled) BOOL deferredStartEnabled API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
diff -ruN /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataObject.h /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataObject.h
--- /Applications/Xcode_26.0.0-beta5.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataObject.h	2025-07-27 03:34:34
+++ /Applications/Xcode_26.0.0-beta6.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataObject.h	2025-08-08 03:40:39
@@ -87,22 +87,14 @@
  */
 @property(readonly) AVMetadataObjectType type;
 
-/*!
- @property groupID
- @abstract
-    A number associated with object groups (e.g., face and body) that is unique for each physical object (e.g., a person whom the face and body belong to).
- @discussion
-    The value of this property is an NSInteger indicating the unique identifier to combine objects (for instance, face and body) into groups (a physical person). A human body and face for the same person will have the same group ID. It is set to -1 when it's invalid or not available. When it's set to a value of >=0, it is unique across all object groups.
- */
+/// An identifier associated with a metadata object used to group it with other metadata objects belonging to a common parent.
+///
+/// When presented with a collection of ``AVMetadataObject`` instances of different types, you may use the objects' ``groupID`` to combine them into groups. For example, a human body and face belonging to the same person have the same ``groupID``.  If an object's ``groupID`` property is set to -1, it is invalid. When set to a value of >=0, it is unique across all object groups.
 @property(readonly) NSInteger groupID API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos, watchos);
 
-/*!
- @property objectID
- @abstract
-    A unique identifier for each detected object type (face, body, hands, heads and salient objects).
- @discussion
-    Defaults to a value of -1 when it is invalid or not available. When a new object enters the picture, it is assigned a new unique identifier. objectIDs are not re-used as objects leave the picture and new ones enter. Objects that leave the picture then re-enter are assigned a new objectID.
- */
+/// A unique identifier for each detected object type (face, body, hands, heads and salient objects) in a collection.
+///
+/// Defaults to a value of -1 when invalid or not available. When used in conjunction with an ``AVCaptureMetadataOutput``, each newly detected object that enters the scene is assigned a unique identifier. ``objectID``s are never re-used as objects leave the picture and new ones enter. Objects that leave the picture and then re-enter are assigned a new ``objectID``.
 @property(readonly) NSInteger objectID API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos, watchos);
 
 @end
@@ -110,20 +102,12 @@
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVMetadataObject (AVMetadataObjectCinematicVideoSupport)
 
-/*!
- @property cinematicVideoFocusMode
- @abstract
-    Current focus mode when recording a Cinematic Video.
- @discussion
-    Default is AVCaptureCinematicVideoFocusModeNone.
- */
+/// The current focus mode when an object is detected during a Cinematic Video recording.
+///
+/// Default is ``AVCaptureCinematicVideoFocusMode/AVCaptureCinematicVideoFocusModeNone``.
 @property(readonly) AVCaptureCinematicVideoFocusMode cinematicVideoFocusMode API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @property fixedFocus
- @abstract
-    A BOOL indicating whether this metadata object represents a fixed focus.
- */
+/// A BOOL indicating whether this metadata object represents a fixed focus.
 @property(readonly, getter=isFixedFocus) BOOL fixedFocus API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
 @end
@@ -212,21 +196,14 @@
 
 #pragma mark - AVMetadataCatHeadObject
 
-/*!
- @constant AVMetadataObjectTypeCatHead
- @abstract
-    An identifier for an instance of AVMetadataCatHeadObject.
-
- @discussion
-    AVMetadataCatHeadObject objects return this constant as their type.
-*/
+/// An identifier for an instance of a cat head object.
+///
+/// ``AVMetadataCatHeadObject`` objects return this constant as their type.
 AVF_EXPORT AVMetadataObjectType const AVMetadataObjectTypeCatHead API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @class AVMetadataCatHeadObject
- @abstract
-    AVMetadataCatHeadObject is a concrete subclass of AVMetadataObject representing a cat head.
- */
+/// A concrete metadata object subclass representing a cat head.
+///
+/// ``AVMetadataCatHeadObject`` is a concrete subclass of ``AVMetadataObject`` representing a cat head.
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVMetadataCatHeadObject : AVMetadataObject <NSCopying>
 
@@ -261,21 +238,14 @@
 
 #pragma mark - AVMetadataDogHeadObject
 
-/*!
- @constant AVMetadataObjectTypeDogHead
- @abstract
-    An identifier for an instance of AVMetadataDogHeadObject.
-
- @discussion
-    AVMetadataDogHeadObject objects return this constant as their type.
-*/
+/// An identifier for an instance of a dog head object.
+///
+/// ``AVMetadataDogHeadObject`` objects return this constant as their type.
 AVF_EXPORT AVMetadataObjectType const AVMetadataObjectTypeDogHead API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos);
 
-/*!
- @class AVMetadataDogHeadObject
- @abstract
-    AVMetadataDogHeadObject is a concrete subclass of AVMetadataObject representing a dog head.
- */
+/// A concrete metadata object subclass representing a dog head.
+///
+/// ``AVMetadataDogHeadObject`` is a concrete subclass of ``AVMetadataObject`` representing a dog head.
 API_AVAILABLE(macos(26.0), ios(26.0), macCatalyst(26.0), tvos(26.0)) API_UNAVAILABLE(visionos) API_UNAVAILABLE(watchos)
 @interface AVMetadataDogHeadObject : AVMetadataObject <NSCopying>
 

Clone this wiki locally