// swift-interface-format-version: 1.0
// swift-compiler-version: Apple Swift version 6.3 effective-5.10 (swiftlang-6.3.0.123.4 clang-2100.0.123.2)
// swift-module-flags: -target arm64e-apple-ios26.4-macabi -enable-objc-interop -autolink-force-load -enable-library-evolution -module-link-name swiftVideoToolbox -swift-version 5 -enforce-exclusivity=checked -O -library-level api -enable-upcoming-feature StrictConcurrency -enable-upcoming-feature InternalImportsByDefault -enable-upcoming-feature MemberImportVisibility -enable-experimental-feature DebugDescriptionMacro -enable-bare-slash-regex -user-module-version 3305.24.5.2 -module-name VideoToolbox
// swift-module-flags-ignorable: -formal-cxx-interoperability-mode=off -interface-compiler-version 6.3
public import CoreFoundation
@_exported public import CoreMedia
public import CoreVideo
public import Darwin
public import Foundation
public import Swift
@_exported public import VideoToolbox
public import _Concurrency
public import _StringProcessing
public import _SwiftConcurrencyShims

// MARK: - VTTemporalNoiseFilterConfiguration
// NOTE(review): the original interface carried `@available(visionOS, unavailable)`
// twice on this extension; the duplicate has been removed (no semantic change).
@available(macOS 26.0, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTTemporalNoiseFilterConfiguration {
  public var supportedPixelFormats: [Darwin.OSType] { get }
  public class var supportedSourcePixelFormats: [Darwin.OSType] { get }
}

// MARK: - VTHDRPerFrameMetadataGenerationSession
@available(macOS 15.0, iOS 18.0, tvOS 18.0, visionOS 2.0, *)
@available(watchOS, unavailable)
public class VTHDRPerFrameMetadataGenerationSession {
  public enum HDRFormat : Swift.Int, Swift.Sendable {
    case dolbyVision
    public init?(rawValue: Swift.Int)
    @available(iOS 18.0, tvOS 18.0, visionOS 2.0, macOS 15.0, *)
    @available(watchOS, unavailable)
    public typealias RawValue = Swift.Int
    public var rawValue: Swift.Int { get }
  }
  public init(framesPerSecond: Swift.Float, hdrFormats: [VideoToolbox.VTHDRPerFrameMetadataGenerationSession.HDRFormat]? = nil) throws
  public func attachMetadata(to: CoreVideo.CVPixelBuffer, sceneChange: Swift.Bool = false) throws
  @objc deinit
}
@available(*, unavailable)
extension VideoToolbox.VTHDRPerFrameMetadataGenerationSession : Swift.Sendable {
}

// MARK: - VTLowLatencyFrameInterpolation
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTLowLatencyFrameInterpolationConfiguration {
  public var supportedPixelFormats: [Darwin.OSType] { get }
}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTLowLatencyFrameInterpolationParameters {
  convenience public init?(sourceFrame: VideoToolbox.VTFrameProcessorFrame, previousFrame: VideoToolbox.VTFrameProcessorFrame, interpolationPhase: [Swift.Float], destinationFrames: [VideoToolbox.VTFrameProcessorFrame])
  public var interpolationPhase: [Swift.Float] { get }
}

// MARK: - VTFrameRateConversion
@available(macOS 15.4, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTFrameRateConversionConfiguration {
  @available(watchOS, unavailable)
  @available(tvOS, unavailable)
  @available(visionOS, unavailable)
  @available(iOS, unavailable)
  @available(macOS, introduced: 15.4, deprecated: 15.4, message: "use supportedPixelFormats: [OSType] instead")
  public var frameSupportedPixelFormats: [Foundation.NSNumber] { get }
  public var supportedPixelFormats: [Darwin.OSType] { get }
}
@available(macOS 15.4, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTFrameRateConversionParameters {
  convenience public init?(sourceFrame: VideoToolbox.VTFrameProcessorFrame, nextFrame: VideoToolbox.VTFrameProcessorFrame, opticalFlow: VideoToolbox.VTFrameProcessorOpticalFlow?, interpolationPhase: [Swift.Float], submissionMode: VideoToolbox.VTFrameRateConversionParameters.SubmissionMode, destinationFrames: [VideoToolbox.VTFrameProcessorFrame])
  public var interpolationPhase: [Swift.Float] { get }
}

// MARK: - Super-resolution scalers
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTLowLatencySuperResolutionScalerConfiguration {
  public class func supportedScaleFactors(frameWidth: Swift.Int, frameHeight: Swift.Int) -> [Swift.Float]
  public var supportedPixelFormats: [Darwin.OSType] { get }
}
@available(macOS 26.0, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTSuperResolutionScalerConfiguration {
  public class var supportedScaleFactors: [Swift.Int] { get }
  public var supportedPixelFormats: [Darwin.OSType] { get }
}

// MARK: - VTMotionEstimationSession
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
final public class VTMotionEstimationSession : Swift.Sendable {
  public enum BlockSize : Swift.Int, Swift.Sendable {
    case blockSize16x16
    case blockSize4x4
    public init?(rawValue: Swift.Int)
    @available(iOS 26.0, tvOS 26.0, visionOS 26.0, macOS 26.0, *)
    @available(watchOS, unavailable)
    public typealias RawValue = Swift.Int
    public var rawValue: Swift.Int { get }
  }
  public struct FrameFlags : Swift.OptionSet, Swift.Sendable {
    public var rawValue: Swift.UInt32
    public init(rawValue: Swift.UInt32)
    public static let currentBufferWillBeNextReferenceBuffer: VideoToolbox.VTMotionEstimationSession.FrameFlags
    @available(iOS 26.0, tvOS 26.0, visionOS 26.0, macOS 26.0, *)
    @available(watchOS, unavailable)
    public typealias ArrayLiteralElement = VideoToolbox.VTMotionEstimationSession.FrameFlags
    @available(iOS 26.0, tvOS 26.0, visionOS 26.0, macOS 26.0, *)
    @available(watchOS, unavailable)
    public typealias Element = VideoToolbox.VTMotionEstimationSession.FrameFlags
    @available(iOS 26.0, tvOS 26.0, visionOS 26.0, macOS 26.0, *)
    @available(watchOS, unavailable)
    public typealias RawValue = Swift.UInt32
  }
  public init(width: Swift.UInt32, height: Swift.UInt32, motionVectorSize: VideoToolbox.VTMotionEstimationSession.BlockSize = .blockSize16x16, useMultiPassSearch: Swift.Bool = false, label: Swift.String? = nil) throws
  public struct Motion : Swift.Sendable {
    public var _motionVector: CoreVideo.CVReadOnlyPixelBuffer
    public var motionVector: CoreVideo.CVReadOnlyPixelBuffer { get }
  }
  final public var label: Swift.String? { get }
  final public var motionVectorSize: VideoToolbox.VTMotionEstimationSession.BlockSize { get throws }
  final public var useMultiPassSearch: Swift.Bool { get }
  final public var sourcePixelBufferAttributes: [Swift.String : any Swift.Sendable] { get }
  final public func motion(of currentImage: CoreVideo.CVReadOnlyPixelBuffer, comparedTo referenceImage: CoreVideo.CVReadOnlyPixelBuffer, flags: VideoToolbox.VTMotionEstimationSession.FrameFlags = .init(rawValue: 0)) async throws -> VideoToolbox.VTMotionEstimationSession.Motion
  @objc deinit
}

// MARK: - VTOpticalFlowConfiguration
@available(macOS 15.4, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTOpticalFlowConfiguration {
  @available(watchOS, unavailable)
  @available(tvOS, unavailable)
  @available(visionOS, unavailable)
  @available(iOS, unavailable)
  @available(macOS, introduced: 15.4, deprecated: 15.4, message: "use supportedPixelFormats: [OSType] instead")
  public var frameSupportedPixelFormats: [Foundation.NSNumber] { get }
  public var supportedPixelFormats: [Darwin.OSType] { get }
}

// MARK: - VTMotionBlurConfiguration
@available(macOS 15.4, iOS 26.0, *)
@available(watchOS, unavailable)
@available(tvOS, unavailable)
@available(visionOS, unavailable)
extension VideoToolbox.VTMotionBlurConfiguration {
  @available(watchOS, unavailable)
  @available(tvOS, unavailable)
  @available(visionOS, unavailable)
  @available(iOS, unavailable)
  @available(macOS, introduced: 15.4, deprecated: 15.4, message: "use supportedPixelFormats: [OSType] instead")
  public var frameSupportedPixelFormats: [Foundation.NSNumber] { get }
  public var supportedPixelFormats: [Darwin.OSType] { get }
}
// MARK: - Session convenience functions
// NOTE(review): the three `UnsafeMutablePointer` parameters below appeared with
// their generic arguments stripped (bare `Swift.UnsafeMutablePointer`, which is
// not a valid parameter type). The arguments were reconstructed from the
// documented VideoToolbox API (VTDecompressionSessionCreate /
// VTDecompressionSessionDecodeFrame / VTCompressionSessionEncodeMultiImageFrame)
// — confirm against the original generated interface.
@available(macOS 14.0, iOS 17.0, tvOS 17.0, visionOS 1.0, *)
@available(watchOS, unavailable)
public func VTDecompressionSessionCreate(allocator: CoreFoundation.CFAllocator?, formatDescription videoFormatDescription: CoreMedia.CMVideoFormatDescription, decoderSpecification videoDecoderSpecification: CoreFoundation.CFDictionary?, imageBufferAttributes destinationImageBufferAttributes: CoreFoundation.CFDictionary?, decompressionSessionOut: Swift.UnsafeMutablePointer<VideoToolbox.VTDecompressionSession?>) -> Darwin.OSStatus
@available(macOS 14.0, iOS 17.0, visionOS 1.0, *)
@available(tvOS, unavailable)
@available(watchOS, unavailable)
public func VTDecompressionSessionDecodeFrame(_ session: VideoToolbox.VTDecompressionSession, sampleBuffer: CoreMedia.CMSampleBuffer, flags decodeFlags: VideoToolbox.VTDecodeFrameFlags, infoFlagsOut: Swift.UnsafeMutablePointer<VideoToolbox.VTDecodeInfoFlags>?, completionHandler: @escaping @Sendable (_ status: Darwin.OSStatus, _ infoFlags: VideoToolbox.VTDecodeInfoFlags, _ imageBuffer: CoreVideo.CVImageBuffer?, _ taggedBuffers: [CoreMedia.CMTaggedBuffer]?, _ presentationTimeStamp: CoreMedia.CMTime, _ presentationDuration: CoreMedia.CMTime) -> Swift.Void) -> Darwin.OSStatus

// MARK: - VTFrameProcessorFrame.ReadOnlyFrame
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTFrameProcessorFrame {
  public struct ReadOnlyFrame : Swift.Sendable {
    public var frame: CoreVideo.CVReadOnlyPixelBuffer
    public var timeStamp: CoreMedia.CMTime
    public init(frame: CoreVideo.CVReadOnlyPixelBuffer, timeStamp: CoreMedia.CMTime)
  }
}

@available(macOS 14.0, iOS 17.0, visionOS 1.0, *)
@available(tvOS, unavailable)
@available(watchOS, unavailable)
public func VTCompressionSessionEncodeMultiImageFrame(_ session: VideoToolbox.VTCompressionSession, taggedBuffers: [CoreMedia.CMTaggedBuffer], presentationTimeStamp: CoreMedia.CMTime, duration: CoreMedia.CMTime, frameProperties: CoreFoundation.CFDictionary?, infoFlagsOut: Swift.UnsafeMutablePointer<VideoToolbox.VTEncodeInfoFlags>?, outputHandler: @escaping VideoToolbox.VTCompressionOutputHandler) -> Darwin.OSStatus

// MARK: - VTFrameProcessor async conveniences
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTFrameProcessor {
  public func process(parameters: any VideoToolbox.VTFrameProcessorParameters) -> some _Concurrency.AsyncSequence
}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTFrameProcessorConfiguration {
  public var nextFrameCount: Swift.Int? { get }
  public var previousFrameCount: Swift.Int? { get }
  public static var maximumDimensions: CoreMedia.CMVideoDimensions? { get }
  public static var minimumDimensions: CoreMedia.CMVideoDimensions? { get }
}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTFrameProcessorParameters {
  public var destinationFrame: VideoToolbox.VTFrameProcessorFrame? { get }
  public var destinationFrames: [VideoToolbox.VTFrameProcessorFrame]? { get }
}

// MARK: - Synthesized conformances
@available(macOS 15.0, iOS 18.0, tvOS 18.0, visionOS 2.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTHDRPerFrameMetadataGenerationSession.HDRFormat : Swift.Equatable {}
@available(macOS 15.0, iOS 18.0, tvOS 18.0, visionOS 2.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTHDRPerFrameMetadataGenerationSession.HDRFormat : Swift.Hashable {}
@available(macOS 15.0, iOS 18.0, tvOS 18.0, visionOS 2.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTHDRPerFrameMetadataGenerationSession.HDRFormat : Swift.RawRepresentable {}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTMotionEstimationSession.BlockSize : Swift.Equatable {}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTMotionEstimationSession.BlockSize : Swift.Hashable {}
@available(macOS 26.0, iOS 26.0, tvOS 26.0, visionOS 26.0, *)
@available(watchOS, unavailable)
extension VideoToolbox.VTMotionEstimationSession.BlockSize : Swift.RawRepresentable {}