API Differences in Vision in macOS 15.0

Finding out what changed between Apple SDK releases is harder than it used to be. This page is generated automatically and lists the API that is new to the Vision framework in the macOS 15.0 SDK.

Table of Contents

Protocols

BoundingBoxProviding
ImageProcessingRequest
PoseProviding
QuadrilateralProviding
StatefulRequest
TargetedRequest
VisionObservation
VisionRequest
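
Several of these protocols describe capabilities shared across observation types. As a minimal sketch, assuming BoundingBoxProviding requires a boundingBox property of type NormalizedRect (this diff only shows that property on QuadrilateralProviding, under Properties & Constants below), a helper that collects bounding boxes from any conforming observations might look like this:

import Vision

// Assumption: BoundingBoxProviding requires boundingBox: Vision.NormalizedRect,
// mirroring the QuadrilateralProviding property listed under
// Properties & Constants below.
func boundingBoxes(of observations: [any BoundingBoxProviding]) -> [NormalizedRect] {
    observations.map { $0.boundingBox }
}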

Classes

DetectHumanBodyPose3DRequest
DetectTrajectoriesRequest
GeneratePersonSegmentationRequest
ImageRequestHandler
TargetedImageRequestHandler
TrackHomographicImageRegistrationRequest
TrackObjectRequest
TrackOpticalFlowRequest
TrackRectangleRequest
TrackTranslationalImageRegistrationRequest
VideoProcessor

Structs

AnimalBodyPoseObservation
BarcodeObservation
CalculateImageAestheticsScoresRequest
ClassificationObservation
ClassifyImageRequest
ContoursObservation
ContoursObservation.Contour
CoreMLFeatureValueObservation
CoreMLModelContainer
CoreMLRequest
DetectAnimalBodyPoseRequest
DetectBarcodesRequest
DetectContoursRequest
DetectDocumentSegmentationRequest
DetectFaceCaptureQualityRequest
DetectFaceLandmarksRequest
DetectFaceRectanglesRequest
DetectHorizonRequest
DetectHumanBodyPoseRequest
DetectHumanHandPoseRequest
DetectHumanRectanglesRequest
DetectRectanglesRequest
DetectTextRectanglesRequest
DetectedDocumentObservation
DetectedObjectObservation
FaceObservation
FaceObservation.CaptureQuality
FaceObservation.Landmarks2D
FaceObservation.Landmarks2D.Region
FeaturePrintObservation
GenerateAttentionBasedSaliencyImageRequest
GenerateForegroundInstanceMaskRequest
GenerateImageFeaturePrintRequest
GenerateObjectnessBasedSaliencyImageRequest
GeneratePersonInstanceMaskRequest
HorizonObservation
HumanBodyPose3DObservation
HumanBodyPoseObservation
HumanHandPoseObservation
HumanObservation
ImageAestheticsScoresObservation
ImageHomographicAlignmentObservation
ImageTranslationAlignmentObservation
InstanceMaskObservation
Joint
Joint3D
NormalizedCircle
NormalizedPoint
NormalizedRect
OpticalFlowObservation
PixelBufferObservation
RecognizeAnimalsRequest
RecognizeTextRequest
RecognizedObjectObservation
RecognizedText
RecognizedTextObservation
RectangleObservation
SaliencyImageObservation
TextObservation
TrajectoryObservation

Enums

BarcodeSymbology
Chirality
ComputeStage
CoordinateOrigin
ElementType
ImageCropAndScaleAction
RequestDescriptor
VisionError
VisionResult

Typealiases

Animal
public typealias RawValue = Swift.String
ComputationAccuracy
public typealias AllCases = [Vision.TrackOpticalFlowRequest.ComputationAccuracy]
JointName
public typealias RawValue = Swift.String
JointName
public typealias RawValue = Swift.String
JointName
public typealias RawValue = Swift.String
JointName
public typealias RawValue = Swift.String
JointsGroupName
public typealias AllCases = [Vision.HumanBodyPose3DObservation.JointsGroupName]
JointsGroupName
public typealias AllCases = [Vision.HumanHandPoseObservation.JointsGroupName]
JointsGroupName
public typealias AllCases = [Vision.HumanBodyPoseObservation.JointsGroupName]
JointsGroupName
public typealias AllCases = [Vision.AnimalBodyPoseObservation.JointsGroupName]
JointsGroupName
public typealias RawValue = Swift.String
JointsGroupName
public typealias RawValue = Swift.String
JointsGroupName
public typealias RawValue = Swift.String
JointsGroupName
public typealias RawValue = Swift.String
QualityLevel
public typealias AllCases = [Vision.GeneratePersonSegmentationRequest.QualityLevel]
RecognitionLevel
public typealias AllCases = [Vision.RecognizeTextRequest.RecognitionLevel]
TrackingLevel
public typealias AllCases = [Vision.TrackRectangleRequest.TrackingLevel]
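
The AllCases typealiases above are the compiler-synthesized CaseIterable witnesses, so the nested option types can be enumerated directly. A minimal sketch:

import Vision

// AllCases is the CaseIterable associated type, so allCases is available.
for accuracy in TrackOpticalFlowRequest.ComputationAccuracy.allCases {
    print(accuracy)
}
for level in RecognizeTextRequest.RecognitionLevel.allCases {
    print(level)
}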

Functions & Methods

AnimalBodyPoseObservation
public func encode(to encoder: any Swift.Encoder) throws
BarcodeObservation
public func encode(to encoder: any Swift.Encoder) throws
ClassificationObservation
public func encode(to encoder: any Swift.Encoder) throws
ContoursObservation
public func encode(to encoder: any Swift.Encoder) throws
CoreMLFeatureValueObservation
public func encode(to encoder: any Swift.Encoder) throws
DetectedDocumentObservation
public func encode(to encoder: any Swift.Encoder) throws
DetectedObjectObservation
public func encode(to encoder: any Swift.Encoder) throws
FaceObservation
public func encode(to encoder: any Swift.Encoder) throws
FeaturePrintObservation
public func encode(to encoder: any Swift.Encoder) throws
HorizonObservation
public func encode(to encoder: any Swift.Encoder) throws
HumanBodyPose3DObservation
public func encode(to encoder: any Swift.Encoder) throws
HumanBodyPoseObservation
public func encode(to encoder: any Swift.Encoder) throws
HumanHandPoseObservation
public func encode(to encoder: any Swift.Encoder) throws
HumanObservation
public func encode(to encoder: any Swift.Encoder) throws
ImageAestheticsScoresObservation
public func encode(to encoder: any Swift.Encoder) throws
ImageHomographicAlignmentObservation
public func encode(to encoder: any Swift.Encoder) throws
ImageProcessingRequest
public func perform(on pixelBuffer: CoreVideo.CVPixelBuffer, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageProcessingRequest
public func perform(on url: Foundation.URL, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageProcessingRequest
public func perform(on image: CoreGraphics.CGImage, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageProcessingRequest
public func perform(on image: CoreImage.CIImage, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageProcessingRequest
public func perform(on sampleBuffer: CoreMedia.CMSampleBuffer, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageProcessingRequest
public func perform(on data: Foundation.Data, orientation: ImageIO.CGImagePropertyOrientation? = nil) async throws -> Self.Result
ImageTranslationAlignmentObservation
public func encode(to encoder: any Swift.Encoder) throws
InstanceMaskObservation
public func encode(to encoder: any Swift.Encoder) throws
RecognizedObjectObservation
public func encode(to encoder: any Swift.Encoder) throws
RecognizedText
public func encode(to encoder: any Swift.Encoder) throws
RecognizedTextObservation
public func encode(to encoder: any Swift.Encoder) throws
RectangleObservation
public func encode(to encoder: any Swift.Encoder) throws
SaliencyImageObservation
public func encode(to encoder: any Swift.Encoder) throws
StatefulRequest
public static func == (lhs: Self, rhs: Self) -> Swift.Bool
StatefulRequest
public func hash(into hasher: inout Swift.Hasher)
TextObservation
public func encode(to encoder: any Swift.Encoder) throws
TrajectoryObservation
public func encode(to encoder: any Swift.Encoder) throws
VisionObservation
public func hash(into hasher: inout Swift.Hasher)
VisionRequest
public func computeDevice(for computeStage: Vision.ComputeStage) -> CoreML.MLComputeDevice?
VisionResult
public static func == (lhs: Vision.VisionResult, rhs: Vision.VisionResult) -> Swift.Bool
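
The ImageProcessingRequest overloads above give every conforming request an async perform(on:) that accepts a URL, Data, CGImage, CIImage, CVPixelBuffer, or CMSampleBuffer and returns the request's associated Result. A minimal sketch, assuming DetectFaceRectanglesRequest conforms to ImageProcessingRequest, has a parameterless initializer, and produces [FaceObservation] as its Result; none of those details is spelled out in this diff.

import Foundation
import Vision

// Assumption: DetectFaceRectanglesRequest conforms to ImageProcessingRequest,
// has a parameterless initializer, and its Result is [FaceObservation].
func detectFaces(in fileURL: URL) async throws -> [FaceObservation] {
    let request = DetectFaceRectanglesRequest()
    // perform(on:) defaults orientation to nil; overloads also accept Data,
    // CGImage, CIImage, CVPixelBuffer, and CMSampleBuffer (see the listing above).
    return try await request.perform(on: fileURL)
}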

Properties & Constants

QuadrilateralProviding
public var boundingBox: Vision.NormalizedRect
StatefulRequest
public var minimumLatencyFrameCount: Swift.Int
VisionError
public var description: Swift.String
VisionError
public var errorDescription: Swift.String?
VisionRequest
public var description: Swift.String
VisionResult
public var description: Swift.String
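
VisionError exposes both description and an optional errorDescription, and (per the Conformances below) adopts Foundation.LocalizedError. A minimal sketch of surfacing those messages at a call site; detectFaces(in:) is the hypothetical helper sketched in the previous section.

import Foundation
import Vision

// Hypothetical call site; detectFaces(in:) is the helper sketched earlier.
func runDetection(on fileURL: URL) async {
    do {
        let faces = try await detectFaces(in: fileURL)
        print("Found \(faces.count) face(s)")
    } catch let error as VisionError {
        // errorDescription is optional; description is always available.
        print(error.errorDescription ?? error.description)
    } catch {
        print(error.localizedDescription)
    }
}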

Conformances

AnimalBodyPoseObservation.JointName
Swift.RawRepresentable
AnimalBodyPoseObservation.JointsGroupName
Swift.RawRepresentable
AnimalBodyPoseObservation
Swift.Codable
BarcodeObservation
Swift.Codable
ClassificationObservation
Swift.Codable
ContoursObservation
Swift.Codable
CoordinateOrigin
Swift.Equatable
CoordinateOrigin
Swift.Hashable
CoreMLFeatureValueObservation
Swift.Codable
DetectedDocumentObservation
Swift.Codable
DetectedObjectObservation
Swift.Codable
FaceObservation
Swift.Codable
FeaturePrintObservation
Swift.Codable
HorizonObservation
Swift.Codable
HumanBodyPose3DObservation.JointName
Swift.RawRepresentable
HumanBodyPose3DObservation.JointsGroupName
Swift.RawRepresentable
HumanBodyPose3DObservation
Swift.Codable
HumanBodyPoseObservation.JointName
Swift.RawRepresentable
HumanBodyPoseObservation.JointsGroupName
Swift.RawRepresentable
HumanBodyPoseObservation
Swift.Codable
HumanHandPoseObservation.JointName
Swift.RawRepresentable
HumanHandPoseObservation.JointsGroupName
Swift.RawRepresentable
HumanHandPoseObservation
Swift.Codable
HumanObservation
Swift.Codable
ImageAestheticsScoresObservation
Swift.Codable
ImageHomographicAlignmentObservation
Swift.Codable
ImageTranslationAlignmentObservation
Swift.Codable
InstanceMaskObservation
Swift.Codable
RecognizeAnimalsRequest.Animal
Swift.RawRepresentable
RecognizedObjectObservation
Swift.Codable
RecognizedText
Swift.Codable
RecognizedTextObservation
Swift.Codable
RectangleObservation
Swift.Codable
SaliencyImageObservation
Swift.Codable
TextObservation
Swift.Codable
TrajectoryObservation
Swift.Codable
VisionError
Foundation.LocalizedError
VisionResult
Swift.CustomStringConvertible
VisionResult
Swift.Equatable
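
Most of the new observation structs adopt Swift.Codable, matching the encode(to:) and init(from:) pairs listed in the earlier sections, so results can be archived with the standard coders. A minimal round-trip sketch using BarcodeObservation; any other type listed as Codable here should behave the same way.

import Foundation
import Vision

// Round-trip an observation through JSON. BarcodeObservation is listed as
// Swift.Codable above; any other Codable observation works the same way.
func archive(_ observation: BarcodeObservation) throws -> Data {
    try JSONEncoder().encode(observation)
}

func restore(_ data: Data) throws -> BarcodeObservation {
    try JSONDecoder().decode(BarcodeObservation.self, from: data)
}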

Initializers

AnimalBodyPoseObservation
public init(from decoder: any Swift.Decoder) throws
AnimalBodyPoseObservation
public init(_ observation: Vision.VNAnimalBodyPoseObservation)
BarcodeObservation
public init(from decoder: any Swift.Decoder) throws
BarcodeObservation
public init(_ observation: Vision.VNBarcodeObservation)
ClassificationObservation
public init(from decoder: any Swift.Decoder) throws
ClassificationObservation
public init(_ observation: Vision.VNClassificationObservation)
ContoursObservation
public init(from decoder: any Swift.Decoder) throws
ContoursObservation
public init(_ observation: Vision.VNContoursObservation)
CoreMLFeatureValueObservation
public init(from decoder: any Swift.Decoder) throws
CoreMLFeatureValueObservation
public init?(_ observation: Vision.VNCoreMLFeatureValueObservation)
DetectedDocumentObservation
public init(from decoder: any Swift.Decoder) throws
DetectedDocumentObservation
public init?(_ observation: Vision.VNRectangleObservation)
DetectedObjectObservation
public init(from decoder: any Swift.Decoder) throws
DetectedObjectObservation
public init(_ observation: Vision.VNDetectedObjectObservation)
FaceObservation
public init(from decoder: any Swift.Decoder) throws
FaceObservation
public init(_ observation: Vision.VNFaceObservation)
FeaturePrintObservation
public init(from decoder: any Swift.Decoder) throws
FeaturePrintObservation
public init(_ observation: Vision.VNFeaturePrintObservation)
HorizonObservation
public init(from decoder: any Swift.Decoder) throws
HorizonObservation
public init(_ observation: Vision.VNHorizonObservation)
HumanBodyPose3DObservation
public init(_ observation: Vision.VNHumanBodyPose3DObservation)
HumanBodyPose3DObservation
public init(from decoder: any Swift.Decoder) throws
HumanBodyPoseObservation
public init(from decoder: any Swift.Decoder) throws
HumanBodyPoseObservation
public init(_ observation: Vision.VNHumanBodyPoseObservation)
HumanHandPoseObservation
public init(from decoder: any Swift.Decoder) throws
HumanHandPoseObservation
public init(_ observation: Vision.VNHumanHandPoseObservation)
HumanObservation
public init(from decoder: any Swift.Decoder) throws
HumanObservation
public init(_ observation: Vision.VNHumanObservation)
ImageAestheticsScoresObservation
public init(from decoder: any Swift.Decoder) throws
ImageAestheticsScoresObservation
public init(_ observation: Vision.VNImageAestheticsScoresObservation)
ImageHomographicAlignmentObservation
public init(from decoder: any Swift.Decoder) throws
ImageHomographicAlignmentObservation
public init(_ observation: Vision.VNImageHomographicAlignmentObservation)
ImageTranslationAlignmentObservation
public init(from decoder: any Swift.Decoder) throws
InstanceMaskObservation
public init(from decoder: any Swift.Decoder) throws
InstanceMaskObservation
public init?(_ observation: Vision.VNInstanceMaskObservation)
OpticalFlowObservation
public init?(_ observation: Vision.VNPixelBufferObservation)
PixelBufferObservation
public init?(_ observation: Vision.VNPixelBufferObservation)
RecognizedObjectObservation
public init(from decoder: any Swift.Decoder) throws
RecognizedObjectObservation
public init(_ observation: Vision.VNRecognizedObjectObservation)
RecognizedText
public init(from decoder: any Swift.Decoder) throws
RecognizedTextObservation
public init(from decoder: any Swift.Decoder) throws
RecognizedTextObservation
public init(_ observation: Vision.VNRecognizedTextObservation)
RectangleObservation
public init(from decoder: any Swift.Decoder) throws
RectangleObservation
public init(_ observation: Vision.VNRectangleObservation)
SaliencyImageObservation
public init(from decoder: any Swift.Decoder) throws
SaliencyImageObservation
public init?(_ observation: Vision.VNSaliencyImageObservation)
TextObservation
public init(from decoder: any Swift.Decoder) throws
TextObservation
public init(_ observation: Vision.VNTextObservation)
TrajectoryObservation
public init(from decoder: any Swift.Decoder) throws
TrajectoryObservation
public init(_ observation: Vision.VNTrajectoryObservation)
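
Besides the Decoder-based initializers, each observation struct gains an initializer (some failable) that wraps the corresponding legacy VNObservation subclass, so results produced by the existing VNRequest pipeline can be bridged into the new value types. A minimal sketch for BarcodeObservation, whose wrapping initializer above is non-failable:

import Vision

// Bridge results from the legacy VNDetectBarcodesRequest pipeline into the
// new value type via the init(_ observation:) initializer listed above.
func bridge(_ legacyResults: [VNBarcodeObservation]) -> [BarcodeObservation] {
    legacyResults.map { BarcodeObservation($0) }
}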