diff --git a/tns-platform-declarations/.gitignore b/tns-platform-declarations/.gitignore
deleted file mode 100644
index 602eb8e1b..000000000
--- a/tns-platform-declarations/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-test.js
\ No newline at end of file
diff --git a/tns-platform-declarations/.npmignore b/tns-platform-declarations/.npmignore
index 96e29d9b8..17f9f0859 100644
--- a/tns-platform-declarations/.npmignore
+++ b/tns-platform-declarations/.npmignore
@@ -1,9 +1,7 @@
+*.tgz
+package
test.ts
test.js
test.js.map
tsconfig.json
-*.tgz
-package
-package1
-package2
-package3
\ No newline at end of file
+typings-gen.sh
\ No newline at end of file
diff --git a/tns-platform-declarations/ios/.gitignore b/tns-platform-declarations/ios/.gitignore
deleted file mode 100644
index f486e7971..000000000
--- a/tns-platform-declarations/ios/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-objc-x86_64
\ No newline at end of file
diff --git a/tns-platform-declarations/ios/ios.d.ts b/tns-platform-declarations/ios/ios.d.ts
index 6a8c24007..addb51afa 100644
--- a/tns-platform-declarations/ios/ios.d.ts
+++ b/tns-platform-declarations/ios/ios.d.ts
@@ -1,118 +1,118 @@
///
-/// [115 <reference path="objc-i386/…"/> directives]
+/// [115 <reference path="objc-x86_64/…"/> directives]
-declare function __collect(): void;
\ No newline at end of file
+declare function __collect(): void;
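The ios.d.ts umbrella file above now points its reference directives at the regenerated objc-x86_64 typings. A NativeScript app normally pulls this surface in through a single reference file of its own rather than editing ios.d.ts; a minimal sketch follows, where the references.d.ts file name and the node_modules-relative path are assumptions based on the package layout shown in this diff:

// references.d.ts (at the app project root) — a hedged sketch, not part of this change.
/// <reference path="./node_modules/tns-platform-declarations/ios/ios.d.ts" />

// With the reference in place, the generated iOS globals type-check as plain TypeScript:
declare const anchor: ARAnchor;        // e.g. delivered by an ARSession at runtime
const id: NSUUID = anchor.identifier;  // typed by the declarations this package ships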
diff --git a/tns-platform-declarations/ios/objc-i386/objc!simd.d.ts b/tns-platform-declarations/ios/objc-i386/objc!simd.d.ts
deleted file mode 100644
index 80d2f2757..000000000
--- a/tns-platform-declarations/ios/objc-i386/objc!simd.d.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-
-declare function simd_clamp(x: number, min: number, max: number): number;
-
-declare function simd_clampFunction(x: number, min: number, max: number): number;
-
-declare function simd_fast_recip(x: number): number;
-
-declare function simd_fast_recipFunction(x: number): number;
-
-declare function simd_fast_rsqrt(x: number): number;
-
-declare function simd_fast_rsqrtFunction(x: number): number;
-
-declare function simd_fract(x: number): number;
-
-declare function simd_fractFunction(x: number): number;
-
-declare function simd_max(x: number, y: number): number;
-
-declare function simd_maxFunction(x: number, y: number): number;
-
-declare function simd_min(x: number, y: number): number;
-
-declare function simd_minFunction(x: number, y: number): number;
-
-declare function simd_mix(x: number, y: number, t: number): number;
-
-declare function simd_mixFunction(x: number, y: number, t: number): number;
-
-declare function simd_muladd(x: number, y: number, z: number): number;
-
-declare function simd_muladdFunction(x: number, y: number, z: number): number;
-
-declare function simd_precise_recip(x: number): number;
-
-declare function simd_precise_recipFunction(x: number): number;
-
-declare function simd_precise_rsqrt(x: number): number;
-
-declare function simd_precise_rsqrtFunction(x: number): number;
-
-declare function simd_recip(x: number): number;
-
-declare function simd_recipFunction(x: number): number;
-
-declare function simd_rsqrt(x: number): number;
-
-declare function simd_rsqrtFunction(x: number): number;
-
-declare function simd_sign(x: number): number;
-
-declare function simd_signFunction(x: number): number;
-
-declare function simd_smoothstep(edge0: number, edge1: number, x: number): number;
-
-declare function simd_smoothstepFunction(edge0: number, edge1: number, x: number): number;
-
-declare function simd_step(edge: number, x: number): number;
-
-declare function simd_stepFunction(edge: number, x: number): number;
diff --git a/tns-platform-declarations/ios/objc-i386/objc!ARKit.d.ts b/tns-platform-declarations/ios/objc-x86_64/objc!ARKit.d.ts
similarity index 70%
rename from tns-platform-declarations/ios/objc-i386/objc!ARKit.d.ts
rename to tns-platform-declarations/ios/objc-x86_64/objc!ARKit.d.ts
index 7df0e1e99..7a86f33e1 100644
--- a/tns-platform-declarations/ios/objc-i386/objc!ARKit.d.ts
+++ b/tns-platform-declarations/ios/objc-x86_64/objc!ARKit.d.ts
@@ -1,5 +1,5 @@
-declare class ARAnchor extends NSObject implements NSCopying {
+declare class ARAnchor extends NSObject implements NSCopying, NSSecureCoding {
static alloc(): ARAnchor; // inherited from NSObject
@@ -7,7 +7,21 @@ declare class ARAnchor extends NSObject implements NSCopying {
readonly identifier: NSUUID;
+ readonly transform: simd_float4x4;
+
+ static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
+
+ constructor(o: { coder: NSCoder; }); // inherited from NSCoding
+
+ constructor(o: { transform: simd_float4x4; });
+
copyWithZone(zone: interop.Pointer | interop.Reference): any;
+
+ encodeWithCoder(aCoder: NSCoder): void;
+
+ initWithCoder(aDecoder: NSCoder): this;
+
+ initWithTransform(transform: simd_float4x4): this;
}
declare var ARBlendShapeLocationBrowDownLeft: string;
@@ -118,13 +132,27 @@ declare class ARCamera extends NSObject implements NSCopying {
static new(): ARCamera; // inherited from NSObject
+ readonly eulerAngles: interop.Reference;
+
readonly imageResolution: CGSize;
+ readonly intrinsics: simd_float3x3;
+
+ readonly projectionMatrix: simd_float4x4;
+
readonly trackingState: ARTrackingState;
readonly trackingStateReason: ARTrackingStateReason;
+ readonly transform: simd_float4x4;
+
copyWithZone(zone: interop.Pointer | interop.Reference): any;
+
+ projectPointOrientationViewportSize(point: interop.Reference, orientation: UIInterfaceOrientation, viewportSize: CGSize): CGPoint;
+
+ projectionMatrixForOrientationViewportSizeZNearZFar(orientation: UIInterfaceOrientation, viewportSize: CGSize, zNear: number, zFar: number): simd_float4x4;
+
+ viewMatrixForOrientation(orientation: UIInterfaceOrientation): simd_float4x4;
}
declare class ARConfiguration extends NSObject implements NSCopying {
@@ -137,10 +165,14 @@ declare class ARConfiguration extends NSObject implements NSCopying {
providesAudioData: boolean;
+ videoFormat: ARVideoFormat;
+
worldAlignment: ARWorldAlignment;
static readonly isSupported: boolean;
+ static readonly supportedVideoFormats: NSArray;
+
copyWithZone(zone: interop.Pointer | interop.Reference): any;
}
@@ -150,6 +182,8 @@ declare class ARDirectionalLightEstimate extends ARLightEstimate {
static new(): ARDirectionalLightEstimate; // inherited from NSObject
+ readonly primaryLightDirection: interop.Reference;
+
readonly primaryLightIntensity: number;
readonly sphericalHarmonicsCoefficients: NSData;
@@ -165,7 +199,9 @@ declare const enum ARErrorCode {
CameraUnauthorized = 103,
- WorldTrackingFailed = 200
+ WorldTrackingFailed = 200,
+
+ InvalidReferenceImage = 300
}
declare var ARErrorDomain: string;
@@ -217,7 +253,7 @@ declare class ARFaceAnchor extends ARAnchor implements ARTrackable {
self(): this;
}
-declare class ARFaceGeometry extends NSObject implements NSCopying {
+declare class ARFaceGeometry extends NSObject implements NSCopying, NSSecureCoding {
static alloc(): ARFaceGeometry; // inherited from NSObject
@@ -225,17 +261,29 @@ declare class ARFaceGeometry extends NSObject implements NSCopying {
readonly textureCoordinateCount: number;
+ readonly textureCoordinates: interop.Pointer | interop.Reference;
+
readonly triangleCount: number;
readonly triangleIndices: interop.Pointer | interop.Reference;
readonly vertexCount: number;
+ readonly vertices: interop.Pointer | interop.Reference;
+
+ static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
+
constructor(o: { blendShapes: NSDictionary; });
+ constructor(o: { coder: NSCoder; }); // inherited from NSCoding
+
copyWithZone(zone: interop.Pointer | interop.Reference): any;
+ encodeWithCoder(aCoder: NSCoder): void;
+
initWithBlendShapes(blendShapes: NSDictionary): this;
+
+ initWithCoder(aDecoder: NSCoder): this;
}
declare class ARFaceTrackingConfiguration extends ARConfiguration {
@@ -284,7 +332,11 @@ declare class ARHitTestResult extends NSObject {
readonly distance: number;
+ readonly localTransform: simd_float4x4;
+
readonly type: ARHitTestResultType;
+
+ readonly worldTransform: simd_float4x4;
}
declare const enum ARHitTestResultType {
@@ -293,9 +345,22 @@ declare const enum ARHitTestResultType {
EstimatedHorizontalPlane = 2,
+ EstimatedVerticalPlane = 4,
+
ExistingPlane = 8,
- ExistingPlaneUsingExtent = 16
+ ExistingPlaneUsingExtent = 16,
+
+ ExistingPlaneUsingGeometry = 32
+}
+
+declare class ARImageAnchor extends ARAnchor {
+
+ static alloc(): ARImageAnchor; // inherited from NSObject
+
+ static new(): ARImageAnchor; // inherited from NSObject
+
+ readonly referenceImage: ARReferenceImage;
}
declare class ARLightEstimate extends NSObject {
@@ -314,6 +379,8 @@ declare class AROrientationTrackingConfiguration extends ARConfiguration {
static alloc(): AROrientationTrackingConfiguration; // inherited from NSObject
static new(): AROrientationTrackingConfiguration; // inherited from NSObject
+
+ autoFocusEnabled: boolean;
}
declare class ARPlaneAnchor extends ARAnchor {
@@ -323,21 +390,62 @@ declare class ARPlaneAnchor extends ARAnchor {
static new(): ARPlaneAnchor; // inherited from NSObject
readonly alignment: ARPlaneAnchorAlignment;
+
+ readonly center: interop.Reference;
+
+ readonly extent: interop.Reference;
+
+ readonly geometry: ARPlaneGeometry;
}
declare const enum ARPlaneAnchorAlignment {
- Horizontal = 0
+ Horizontal = 0,
+
+ Vertical = 1
}
declare const enum ARPlaneDetection {
None = 0,
- Horizontal = 1
+ Horizontal = 1,
+
+ Vertical = 2
}
-declare class ARPointCloud extends NSObject {
+declare class ARPlaneGeometry extends NSObject implements NSSecureCoding {
+
+ static alloc(): ARPlaneGeometry; // inherited from NSObject
+
+ static new(): ARPlaneGeometry; // inherited from NSObject
+
+ readonly boundaryVertexCount: number;
+
+ readonly boundaryVertices: interop.Pointer | interop.Reference;
+
+ readonly textureCoordinateCount: number;
+
+ readonly textureCoordinates: interop.Pointer | interop.Reference;
+
+ readonly triangleCount: number;
+
+ readonly triangleIndices: interop.Pointer | interop.Reference;
+
+ readonly vertexCount: number;
+
+ readonly vertices: interop.Pointer | interop.Reference;
+
+ static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
+
+ constructor(o: { coder: NSCoder; }); // inherited from NSCoding
+
+ encodeWithCoder(aCoder: NSCoder): void;
+
+ initWithCoder(aDecoder: NSCoder): this;
+}
+
+declare class ARPointCloud extends NSObject implements NSSecureCoding {
static alloc(): ARPointCloud; // inherited from NSObject
@@ -346,6 +454,39 @@ declare class ARPointCloud extends NSObject {
readonly count: number;
readonly identifiers: interop.Pointer | interop.Reference;
+
+ readonly points: interop.Pointer | interop.Reference;
+
+ static readonly supportsSecureCoding: boolean; // inherited from NSSecureCoding
+
+ constructor(o: { coder: NSCoder; }); // inherited from NSCoding
+
+ encodeWithCoder(aCoder: NSCoder): void;
+
+ initWithCoder(aDecoder: NSCoder): this;
+}
+
+declare class ARReferenceImage extends NSObject implements NSCopying {
+
+ static alloc(): ARReferenceImage; // inherited from NSObject
+
+ static new(): ARReferenceImage; // inherited from NSObject
+
+ static referenceImagesInGroupNamedBundle(name: string, bundle: NSBundle): NSSet;
+
+ name: string;
+
+ readonly physicalSize: CGSize;
+
+ constructor(o: { CGImage: any; orientation: CGImagePropertyOrientation; physicalWidth: number; });
+
+ constructor(o: { pixelBuffer: any; orientation: CGImagePropertyOrientation; physicalWidth: number; });
+
+ copyWithZone(zone: interop.Pointer | interop.Reference): any;
+
+ initWithCGImageOrientationPhysicalWidth(image: any, orientation: CGImagePropertyOrientation, physicalWidth: number): this;
+
+ initWithPixelBufferOrientationPhysicalWidth(pixelBuffer: any, orientation: CGImagePropertyOrientation, physicalWidth: number): this;
}
declare var ARSCNDebugOptionShowFeaturePoints: SCNDebugOptions;
@@ -368,13 +509,30 @@ declare class ARSCNFaceGeometry extends SCNGeometry {
static geometryWithMDLMesh(mdlMesh: MDLMesh): ARSCNFaceGeometry; // inherited from SCNGeometry
- static geometryWithSourcesElements(sources: NSArray, elements: NSArray): ARSCNFaceGeometry; // inherited from SCNGeometry
+ static geometryWithSourcesElements(sources: NSArray | SCNGeometrySource[], elements: NSArray | SCNGeometryElement[]): ARSCNFaceGeometry; // inherited from SCNGeometry
static new(): ARSCNFaceGeometry; // inherited from NSObject
updateFromFaceGeometry(faceGeometry: ARFaceGeometry): void;
}
+declare class ARSCNPlaneGeometry extends SCNGeometry {
+
+ static alloc(): ARSCNPlaneGeometry; // inherited from NSObject
+
+ static geometry(): ARSCNPlaneGeometry; // inherited from SCNGeometry
+
+ static geometryWithMDLMesh(mdlMesh: MDLMesh): ARSCNPlaneGeometry; // inherited from SCNGeometry
+
+ static geometryWithSourcesElements(sources: NSArray | SCNGeometrySource[], elements: NSArray | SCNGeometryElement[]): ARSCNPlaneGeometry; // inherited from SCNGeometry
+
+ static new(): ARSCNPlaneGeometry; // inherited from NSObject
+
+ static planeGeometryWithDevice(device: MTLDevice): ARSCNPlaneGeometry;
+
+ updateFromPlaneGeometry(planeGeometry: ARPlaneGeometry): void;
+}
+
declare class ARSCNView extends SCNView {
static alloc(): ARSCNView; // inherited from NSObject
@@ -385,11 +543,11 @@ declare class ARSCNView extends SCNView {
static appearanceForTraitCollectionWhenContainedIn(trait: UITraitCollection, ContainerClass: typeof NSObject): ARSCNView; // inherited from UIAppearance
- static appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait: UITraitCollection, containerTypes: NSArray): ARSCNView; // inherited from UIAppearance
+ static appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait: UITraitCollection, containerTypes: NSArray | typeof NSObject[]): ARSCNView; // inherited from UIAppearance
static appearanceWhenContainedIn(ContainerClass: typeof NSObject): ARSCNView; // inherited from UIAppearance
- static appearanceWhenContainedInInstancesOfClasses(containerTypes: NSArray): ARSCNView; // inherited from UIAppearance
+ static appearanceWhenContainedInInstancesOfClasses(containerTypes: NSArray | typeof NSObject[]): ARSCNView; // inherited from UIAppearance
static new(): ARSCNView; // inherited from NSObject
@@ -433,11 +591,11 @@ declare class ARSKView extends SKView {
static appearanceForTraitCollectionWhenContainedIn(trait: UITraitCollection, ContainerClass: typeof NSObject): ARSKView; // inherited from UIAppearance
- static appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait: UITraitCollection, containerTypes: NSArray): ARSKView; // inherited from UIAppearance
+ static appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait: UITraitCollection, containerTypes: NSArray | typeof NSObject[]): ARSKView; // inherited from UIAppearance
static appearanceWhenContainedIn(ContainerClass: typeof NSObject): ARSKView; // inherited from UIAppearance
- static appearanceWhenContainedInInstancesOfClasses(containerTypes: NSArray): ARSKView; // inherited from UIAppearance
+ static appearanceWhenContainedInInstancesOfClasses(containerTypes: NSArray | typeof NSObject[]): ARSKView; // inherited from UIAppearance
static new(): ARSKView; // inherited from NSObject
@@ -492,15 +650,17 @@ declare class ARSession extends NSObject {
runWithConfiguration(configuration: ARConfiguration): void;
runWithConfigurationOptions(configuration: ARConfiguration, options: ARSessionRunOptions): void;
+
+ setWorldOrigin(relativeTransform: simd_float4x4): void;
}
interface ARSessionDelegate extends ARSessionObserver {
- sessionDidAddAnchors?(session: ARSession, anchors: NSArray): void;
+ sessionDidAddAnchors?(session: ARSession, anchors: NSArray | ARAnchor[]): void;
- sessionDidRemoveAnchors?(session: ARSession, anchors: NSArray): void;
+ sessionDidRemoveAnchors?(session: ARSession, anchors: NSArray | ARAnchor[]): void;
- sessionDidUpdateAnchors?(session: ARSession, anchors: NSArray): void;
+ sessionDidUpdateAnchors?(session: ARSession, anchors: NSArray | ARAnchor[]): void;
sessionDidUpdateFrame?(session: ARSession, frame: ARFrame): void;
}
@@ -519,6 +679,8 @@ interface ARSessionObserver extends NSObjectProtocol {
sessionInterruptionEnded?(session: ARSession): void;
+ sessionShouldAttemptRelocalization?(session: ARSession): boolean;
+
sessionWasInterrupted?(session: ARSession): void;
}
declare var ARSessionObserver: {
@@ -559,7 +721,22 @@ declare const enum ARTrackingStateReason {
ExcessiveMotion = 2,
- InsufficientFeatures = 3
+ InsufficientFeatures = 3,
+
+ Relocalizing = 4
+}
+
+declare class ARVideoFormat extends NSObject implements NSCopying {
+
+ static alloc(): ARVideoFormat; // inherited from NSObject
+
+ static new(): ARVideoFormat; // inherited from NSObject
+
+ readonly framesPerSecond: number;
+
+ readonly imageResolution: CGSize;
+
+ copyWithZone(zone: interop.Pointer | interop.Reference): any;
}
declare const enum ARWorldAlignment {
@@ -577,5 +754,9 @@ declare class ARWorldTrackingConfiguration extends ARConfiguration {
static new(): ARWorldTrackingConfiguration; // inherited from NSObject
+ autoFocusEnabled: boolean;
+
+ detectionImages: NSSet;
+
planeDetection: ARPlaneDetection;
}
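Taken together, the ARKit hunks above surface the iOS 11.3-era additions: vertical plane detection, reference-image detection, plane geometry, video formats, and relocalization. A minimal usage sketch against the newly declared members follows; the "AR Resources" asset-catalog group name and the view setup are assumptions, everything else is declared in this file:

// A hedged sketch; not part of this change.
const sceneView = ARSCNView.new();

const config = ARWorldTrackingConfiguration.new();
config.autoFocusEnabled = true;
config.planeDetection = ARPlaneDetection.Horizontal | ARPlaneDetection.Vertical; // Vertical is new

// detectionImages takes an NSSet of ARReferenceImage; the group name is an assumption.
config.detectionImages = ARReferenceImage.referenceImagesInGroupNamedBundle("AR Resources", NSBundle.mainBundle);

sceneView.session.runWithConfiguration(config);

// Anchors reported by the session can be narrowed to the new subclasses:
function describeAnchor(anchor: ARAnchor): string {
    if (anchor instanceof ARImageAnchor) {
        return "detected image: " + anchor.referenceImage.name;
    }
    if (anchor instanceof ARPlaneAnchor && anchor.alignment === ARPlaneAnchorAlignment.Vertical) {
        return "vertical plane anchor";
    }
    return "anchor " + anchor.identifier.UUIDString;
}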
diff --git a/tns-platform-declarations/ios/objc-i386/objc!AVFoundation.d.ts b/tns-platform-declarations/ios/objc-x86_64/objc!AVFoundation.d.ts
similarity index 96%
rename from tns-platform-declarations/ios/objc-i386/objc!AVFoundation.d.ts
rename to tns-platform-declarations/ios/objc-x86_64/objc!AVFoundation.d.ts
index c78f2de04..00868f6df 100644
--- a/tns-platform-declarations/ios/objc-i386/objc!AVFoundation.d.ts
+++ b/tns-platform-declarations/ios/objc-x86_64/objc!AVFoundation.d.ts
@@ -74,13 +74,13 @@ declare class AVAsset extends NSObject implements AVAsynchronousKeyValueLoading,
cancelLoading(): void;
- chapterMetadataGroupsBestMatchingPreferredLanguages(preferredLanguages: NSArray): NSArray;
+ chapterMetadataGroupsBestMatchingPreferredLanguages(preferredLanguages: NSArray | string[]): NSArray;
- chapterMetadataGroupsWithTitleLocaleContainingItemsWithCommonKeys(locale: NSLocale, commonKeys: NSArray): NSArray;
+ chapterMetadataGroupsWithTitleLocaleContainingItemsWithCommonKeys(locale: NSLocale, commonKeys: NSArray | string[]): NSArray;
copyWithZone(zone: interop.Pointer | interop.Reference): any;
- loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray, handler: () => void): void;
+ loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray | string[], handler: () => void): void;
mediaSelectionGroupForMediaCharacteristic(mediaCharacteristic: string): AVMediaSelectionGroup;
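The recurring pattern in this file — NSArray parameters widened to NSArray | T[] — means a plain JavaScript array now type-checks wherever the runtime marshals it to an NSArray. A small sketch, assuming an AVAsset obtained elsewhere:

// A hedged sketch; `asset` is assumed to exist (e.g. created from a URL).
declare const asset: AVAsset;

// Before this change the declaration demanded an NSArray here; a string[] is now
// accepted and is marshalled to NSArray when the call crosses into Objective-C.
asset.loadValuesAsynchronouslyForKeysCompletionHandler(["duration", "tracks"], () => {
    console.log("asset keys loaded; asset.duration and asset.tracks are now safe to read");
});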
@@ -114,13 +114,13 @@ interface AVAssetDownloadDelegate extends NSURLSessionTaskDelegate {
URLSessionAggregateAssetDownloadTaskDidCompleteForMediaSelection?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, mediaSelection: AVMediaSelection): void;
- URLSessionAggregateAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoadForMediaSelection?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray, timeRangeExpectedToLoad: CMTimeRange, mediaSelection: AVMediaSelection): void;
+ URLSessionAggregateAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoadForMediaSelection?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray | NSValue[], timeRangeExpectedToLoad: CMTimeRange, mediaSelection: AVMediaSelection): void;
URLSessionAggregateAssetDownloadTaskWillDownloadToURL?(session: NSURLSession, aggregateAssetDownloadTask: AVAggregateAssetDownloadTask, location: NSURL): void;
URLSessionAssetDownloadTaskDidFinishDownloadingToURL?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, location: NSURL): void;
- URLSessionAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoad?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray, timeRangeExpectedToLoad: CMTimeRange): void;
+ URLSessionAssetDownloadTaskDidLoadTimeRangeTotalTimeRangesLoadedTimeRangeExpectedToLoad?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, timeRange: CMTimeRange, loadedTimeRanges: NSArray | NSValue[], timeRangeExpectedToLoad: CMTimeRange): void;
URLSessionAssetDownloadTaskDidResolveMediaSelection?(session: NSURLSession, assetDownloadTask: AVAssetDownloadTask, resolvedMediaSelection: AVMediaSelection): void;
}
@@ -184,7 +184,7 @@ declare class AVAssetDownloadURLSession extends NSURLSession {
static sessionWithConfigurationAssetDownloadDelegateDelegateQueue(configuration: NSURLSessionConfiguration, delegate: AVAssetDownloadDelegate, delegateQueue: NSOperationQueue): AVAssetDownloadURLSession;
- aggregateAssetDownloadTaskWithURLAssetMediaSelectionsAssetTitleAssetArtworkDataOptions(URLAsset: AVURLAsset, mediaSelections: NSArray, title: string, artworkData: NSData, options: NSDictionary): AVAggregateAssetDownloadTask;
+ aggregateAssetDownloadTaskWithURLAssetMediaSelectionsAssetTitleAssetArtworkDataOptions(URLAsset: AVURLAsset, mediaSelections: NSArray | AVMediaSelection[], title: string, artworkData: NSData, options: NSDictionary): AVAggregateAssetDownloadTask;
assetDownloadTaskWithURLAssetAssetTitleAssetArtworkDataOptions(URLAsset: AVURLAsset, title: string, artworkData: NSData, options: NSDictionary): AVAssetDownloadTask;
@@ -335,7 +335,7 @@ declare class AVAssetImageGenerator extends NSObject {
copyCGImageAtTimeActualTimeError(requestedTime: CMTime, actualTime: interop.Pointer | interop.Reference): any;
- generateCGImagesAsynchronouslyForTimesCompletionHandler(requestedTimes: NSArray, handler: (p1: CMTime, p2: any, p3: CMTime, p4: AVAssetImageGeneratorResult, p5: NSError) => void): void;
+ generateCGImagesAsynchronouslyForTimesCompletionHandler(requestedTimes: NSArray | NSValue[], handler: (p1: CMTime, p2: any, p3: CMTime, p4: AVAssetImageGeneratorResult, p5: NSError) => void): void;
initWithAsset(asset: AVAsset): this;
}
@@ -392,7 +392,7 @@ declare class AVAssetReaderAudioMixOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderAudioMixOutput; // inherited from NSObject
- static assetReaderAudioMixOutputWithAudioTracksAudioSettings(audioTracks: NSArray, audioSettings: NSDictionary): AVAssetReaderAudioMixOutput;
+ static assetReaderAudioMixOutputWithAudioTracksAudioSettings(audioTracks: NSArray | AVAssetTrack[], audioSettings: NSDictionary): AVAssetReaderAudioMixOutput;
static new(): AVAssetReaderAudioMixOutput; // inherited from NSObject
@@ -404,9 +404,9 @@ declare class AVAssetReaderAudioMixOutput extends AVAssetReaderOutput {
readonly audioTracks: NSArray;
- constructor(o: { audioTracks: NSArray; audioSettings: NSDictionary; });
+ constructor(o: { audioTracks: NSArray | AVAssetTrack[]; audioSettings: NSDictionary; });
- initWithAudioTracksAudioSettings(audioTracks: NSArray, audioSettings: NSDictionary): this;
+ initWithAudioTracksAudioSettings(audioTracks: NSArray | AVAssetTrack[], audioSettings: NSDictionary): this;
}
declare class AVAssetReaderOutput extends NSObject {
@@ -425,7 +425,7 @@ declare class AVAssetReaderOutput extends NSObject {
markConfigurationAsFinal(): void;
- resetForReadingTimeRanges(timeRanges: NSArray): void;
+ resetForReadingTimeRanges(timeRanges: NSArray | NSValue[]): void;
}
declare class AVAssetReaderOutputMetadataAdaptor extends NSObject {
@@ -496,7 +496,7 @@ declare class AVAssetReaderVideoCompositionOutput extends AVAssetReaderOutput {
static alloc(): AVAssetReaderVideoCompositionOutput; // inherited from NSObject
- static assetReaderVideoCompositionOutputWithVideoTracksVideoSettings(videoTracks: NSArray, videoSettings: NSDictionary): AVAssetReaderVideoCompositionOutput;
+ static assetReaderVideoCompositionOutputWithVideoTracksVideoSettings(videoTracks: NSArray | AVAssetTrack[], videoSettings: NSDictionary): AVAssetReaderVideoCompositionOutput;
static new(): AVAssetReaderVideoCompositionOutput; // inherited from NSObject
@@ -508,9 +508,9 @@ declare class AVAssetReaderVideoCompositionOutput extends AVAssetReaderOutput {
readonly videoTracks: NSArray;
- constructor(o: { videoTracks: NSArray; videoSettings: NSDictionary; });
+ constructor(o: { videoTracks: NSArray | AVAssetTrack[]; videoSettings: NSDictionary; });
- initWithVideoTracksVideoSettings(videoTracks: NSArray, videoSettings: NSDictionary): this;
+ initWithVideoTracksVideoSettings(videoTracks: NSArray | AVAssetTrack[], videoSettings: NSDictionary): this;
}
declare const enum AVAssetReferenceRestrictions {
@@ -566,6 +566,8 @@ declare class AVAssetResourceLoadingContentInformationRequest extends NSObject {
static new(): AVAssetResourceLoadingContentInformationRequest; // inherited from NSObject
+ readonly allowedContentTypes: NSArray;
+
byteRangeAccessSupported: boolean;
contentLength: number;
@@ -694,7 +696,7 @@ declare class AVAssetTrack extends NSObject implements AVAsynchronousKeyValueLoa
hasMediaCharacteristic(mediaCharacteristic: string): boolean;
- loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray, handler: () => void): void;
+ loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray | string[], handler: () => void): void;
metadataForFormat(format: string): NSArray;
@@ -871,7 +873,7 @@ declare class AVAssetWriterInputGroup extends AVMediaSelectionGroup {
static alloc(): AVAssetWriterInputGroup; // inherited from NSObject
- static assetWriterInputGroupWithInputsDefaultInput(inputs: NSArray, defaultInput: AVAssetWriterInput): AVAssetWriterInputGroup;
+ static assetWriterInputGroupWithInputsDefaultInput(inputs: NSArray | AVAssetWriterInput[], defaultInput: AVAssetWriterInput): AVAssetWriterInputGroup;
static new(): AVAssetWriterInputGroup; // inherited from NSObject
@@ -879,9 +881,9 @@ declare class AVAssetWriterInputGroup extends AVMediaSelectionGroup {
readonly inputs: NSArray;
- constructor(o: { inputs: NSArray; defaultInput: AVAssetWriterInput; });
+ constructor(o: { inputs: NSArray | AVAssetWriterInput[]; defaultInput: AVAssetWriterInput; });
- initWithInputsDefaultInput(inputs: NSArray, defaultInput: AVAssetWriterInput): this;
+ initWithInputsDefaultInput(inputs: NSArray | AVAssetWriterInput[], defaultInput: AVAssetWriterInput): this;
}
declare var AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved: string;
@@ -969,7 +971,7 @@ declare class AVAsynchronousCIImageFilteringRequest extends NSObject implements
interface AVAsynchronousKeyValueLoading {
- loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray, handler: () => void): void;
+ loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray | string[], handler: () => void): void;
statusOfValueForKeyError(key: string): AVKeyValueStatus;
}
@@ -1292,7 +1294,7 @@ declare class AVAudioEngine extends NSObject {
attachNode(node: AVAudioNode): void;
- connectToConnectionPointsFromBusFormat(sourceNode: AVAudioNode, destNodes: NSArray, sourceBus: number, format: AVAudioFormat): void;
+ connectToConnectionPointsFromBusFormat(sourceNode: AVAudioNode, destNodes: NSArray | AVAudioConnectionPoint[], sourceBus: number, format: AVAudioFormat): void;
connectToFormat(node1: AVAudioNode, node2: AVAudioNode, format: AVAudioFormat): void;
@@ -3098,7 +3100,7 @@ declare class AVAudioUnitSampler extends AVAudioUnitMIDIInstrument {
stereoPan: number;
- loadAudioFilesAtURLsError(audioFiles: NSArray): boolean;
+ loadAudioFilesAtURLsError(audioFiles: NSArray | NSURL[]): boolean;
loadInstrumentAtURLError(instrumentURL: NSURL): boolean;
@@ -3183,6 +3185,10 @@ declare class AVCameraCalibrationData extends NSObject {
static new(): AVCameraCalibrationData; // inherited from NSObject
+ readonly extrinsicMatrix: simd_float4x3;
+
+ readonly intrinsicMatrix: simd_float3x3;
+
readonly intrinsicMatrixReferenceDimensions: CGSize;
readonly inverseLensDistortionLookupTable: NSData;
@@ -3278,7 +3284,7 @@ declare class AVCaptureConnection extends NSObject {
static connectionWithInputPortVideoPreviewLayer(port: AVCaptureInputPort, layer: AVCaptureVideoPreviewLayer): AVCaptureConnection;
- static connectionWithInputPortsOutput(ports: NSArray, output: AVCaptureOutput): AVCaptureConnection;
+ static connectionWithInputPortsOutput(ports: NSArray | AVCaptureInputPort[], output: AVCaptureOutput): AVCaptureConnection;
static new(): AVCaptureConnection; // inherited from NSObject
@@ -3332,11 +3338,11 @@ declare class AVCaptureConnection extends NSObject {
constructor(o: { inputPort: AVCaptureInputPort; videoPreviewLayer: AVCaptureVideoPreviewLayer; });
- constructor(o: { inputPorts: NSArray; output: AVCaptureOutput; });
+ constructor(o: { inputPorts: NSArray | AVCaptureInputPort[]; output: AVCaptureOutput; });
initWithInputPortVideoPreviewLayer(port: AVCaptureInputPort, layer: AVCaptureVideoPreviewLayer): this;
- initWithInputPortsOutput(ports: NSArray, output: AVCaptureOutput): this;
+ initWithInputPortsOutput(ports: NSArray | AVCaptureInputPort[], output: AVCaptureOutput): this;
}
declare class AVCaptureDataOutputSynchronizer extends NSObject {
@@ -3351,9 +3357,9 @@ declare class AVCaptureDataOutputSynchronizer extends NSObject {
readonly delegateCallbackQueue: NSObject;
- constructor(o: { dataOutputs: NSArray; });
+ constructor(o: { dataOutputs: NSArray | AVCaptureOutput[]; });
- initWithDataOutputs(dataOutputs: NSArray): this;
+ initWithDataOutputs(dataOutputs: NSArray | AVCaptureOutput[]): this;
setDelegateQueue(delegate: AVCaptureDataOutputSynchronizerDelegate, delegateCallbackQueue: NSObject): void;
}
@@ -3517,6 +3523,8 @@ declare class AVCaptureDevice extends NSObject {
subjectAreaChangeMonitoringEnabled: boolean;
+ readonly systemPressureState: AVCaptureSystemPressureState;
+
readonly torchActive: boolean;
readonly torchAvailable: boolean;
@@ -3578,7 +3586,7 @@ declare class AVCaptureDeviceDiscoverySession extends NSObject {
static alloc(): AVCaptureDeviceDiscoverySession; // inherited from NSObject
- static discoverySessionWithDeviceTypesMediaTypePosition(deviceTypes: NSArray, mediaType: string, position: AVCaptureDevicePosition): AVCaptureDeviceDiscoverySession;
+ static discoverySessionWithDeviceTypesMediaTypePosition(deviceTypes: NSArray | string[], mediaType: string, position: AVCaptureDevicePosition): AVCaptureDeviceDiscoverySession;
static new(): AVCaptureDeviceDiscoverySession; // inherited from NSObject
@@ -3668,6 +3676,8 @@ declare var AVCaptureDeviceTypeBuiltInMicrophone: string;
declare var AVCaptureDeviceTypeBuiltInTelephotoCamera: string;
+declare var AVCaptureDeviceTypeBuiltInTrueDepthCamera: string;
+
declare var AVCaptureDeviceTypeBuiltInWideAngleCamera: string;
declare var AVCaptureDeviceWasConnectedNotification: string;
@@ -3716,9 +3726,9 @@ declare class AVCaptureFileOutput extends AVCaptureOutput {
interface AVCaptureFileOutputRecordingDelegate extends NSObjectProtocol {
- captureOutputDidFinishRecordingToOutputFileAtURLFromConnectionsError(output: AVCaptureFileOutput, outputFileURL: NSURL, connections: NSArray, error: NSError): void;
+ captureOutputDidFinishRecordingToOutputFileAtURLFromConnectionsError(output: AVCaptureFileOutput, outputFileURL: NSURL, connections: NSArray | AVCaptureConnection[], error: NSError): void;
- captureOutputDidStartRecordingToOutputFileAtURLFromConnections?(output: AVCaptureFileOutput, fileURL: NSURL, connections: NSArray): void;
+ captureOutputDidStartRecordingToOutputFileAtURLFromConnections?(output: AVCaptureFileOutput, fileURL: NSURL, connections: NSArray | AVCaptureConnection[]): void;
}
declare var AVCaptureFileOutputRecordingDelegate: {
@@ -3839,7 +3849,7 @@ declare class AVCaptureMetadataOutput extends AVCaptureOutput {
interface AVCaptureMetadataOutputObjectsDelegate extends NSObjectProtocol {
- captureOutputDidOutputMetadataObjectsFromConnection?(output: AVCaptureOutput, metadataObjects: NSArray, connection: AVCaptureConnection): void;
+ captureOutputDidOutputMetadataObjectsFromConnection?(output: AVCaptureOutput, metadataObjects: NSArray | AVMetadataObject[], connection: AVCaptureConnection): void;
}
declare var AVCaptureMetadataOutputObjectsDelegate: {
@@ -3944,9 +3954,9 @@ declare class AVCapturePhotoBracketSettings extends AVCapturePhotoSettings {
static new(): AVCapturePhotoBracketSettings; // inherited from NSObject
- static photoBracketSettingsWithRawPixelFormatTypeProcessedFormatBracketedSettings(rawPixelFormatType: number, processedFormat: NSDictionary, bracketedSettings: NSArray): AVCapturePhotoBracketSettings;
+ static photoBracketSettingsWithRawPixelFormatTypeProcessedFormatBracketedSettings(rawPixelFormatType: number, processedFormat: NSDictionary, bracketedSettings: NSArray | AVCaptureBracketedStillImageSettings[]): AVCapturePhotoBracketSettings;
- static photoBracketSettingsWithRawPixelFormatTypeRawFileTypeProcessedFormatProcessedFileTypeBracketedSettings(rawPixelFormatType: number, rawFileType: string, processedFormat: NSDictionary, processedFileType: string, bracketedSettings: NSArray): AVCapturePhotoBracketSettings;
+ static photoBracketSettingsWithRawPixelFormatTypeRawFileTypeProcessedFormatProcessedFileTypeBracketedSettings(rawPixelFormatType: number, rawFileType: string, processedFormat: NSDictionary, processedFileType: string, bracketedSettings: NSArray | AVCaptureBracketedStillImageSettings[]): AVCapturePhotoBracketSettings;
static photoSettings(): AVCapturePhotoBracketSettings; // inherited from AVCapturePhotoSettings
@@ -4052,7 +4062,7 @@ declare class AVCapturePhotoOutput extends AVCaptureOutput {
capturePhotoWithSettingsDelegate(settings: AVCapturePhotoSettings, delegate: AVCapturePhotoCaptureDelegate): void;
- setPreparedPhotoSettingsArrayCompletionHandler(preparedPhotoSettingsArray: NSArray, completionHandler: (p1: boolean, p2: NSError) => void): void;
+ setPreparedPhotoSettingsArrayCompletionHandler(preparedPhotoSettingsArray: NSArray | AVCapturePhotoSettings[], completionHandler: (p1: boolean, p2: NSError) => void): void;
supportedPhotoCodecTypesForFileType(fileType: string): NSArray;
@@ -4226,11 +4236,15 @@ declare const enum AVCaptureSessionInterruptionReason {
VideoDeviceInUseByAnotherClient = 3,
- VideoDeviceNotAvailableWithMultipleForegroundApps = 4
+ VideoDeviceNotAvailableWithMultipleForegroundApps = 4,
+
+ VideoDeviceNotAvailableDueToSystemPressure = 5
}
declare var AVCaptureSessionInterruptionReasonKey: string;
+declare var AVCaptureSessionInterruptionSystemPressureStateKey: string;
+
declare var AVCaptureSessionPreset1280x720: string;
declare var AVCaptureSessionPreset1920x1080: string;
@@ -4291,9 +4305,9 @@ declare class AVCaptureStillImageOutput extends AVCaptureOutput {
captureStillImageAsynchronouslyFromConnectionCompletionHandler(connection: AVCaptureConnection, handler: (p1: any, p2: NSError) => void): void;
- captureStillImageBracketAsynchronouslyFromConnectionWithSettingsArrayCompletionHandler(connection: AVCaptureConnection, settings: NSArray, handler: (p1: any, p2: AVCaptureBracketedStillImageSettings, p3: NSError) => void): void;
+ captureStillImageBracketAsynchronouslyFromConnectionWithSettingsArrayCompletionHandler(connection: AVCaptureConnection, settings: NSArray | AVCaptureBracketedStillImageSettings[], handler: (p1: any, p2: AVCaptureBracketedStillImageSettings, p3: NSError) => void): void;
- prepareToCaptureStillImageBracketFromConnectionWithSettingsArrayCompletionHandler(connection: AVCaptureConnection, settings: NSArray, handler: (p1: boolean, p2: NSError) => void): void;
+ prepareToCaptureStillImageBracketFromConnectionWithSettingsArrayCompletionHandler(connection: AVCaptureConnection, settings: NSArray | AVCaptureBracketedStillImageSettings[], handler: (p1: boolean, p2: NSError) => void): void;
}
declare class AVCaptureSynchronizedData extends NSObject {
@@ -4354,6 +4368,38 @@ declare class AVCaptureSynchronizedSampleBufferData extends AVCaptureSynchronize
readonly sampleBufferWasDropped: boolean;
}
+declare const enum AVCaptureSystemPressureFactors {
+
+ None = 0,
+
+ SystemTemperature = 1,
+
+ PeakPower = 2,
+
+ DepthModuleTemperature = 4
+}
+
+declare var AVCaptureSystemPressureLevelCritical: string;
+
+declare var AVCaptureSystemPressureLevelFair: string;
+
+declare var AVCaptureSystemPressureLevelNominal: string;
+
+declare var AVCaptureSystemPressureLevelSerious: string;
+
+declare var AVCaptureSystemPressureLevelShutdown: string;
+
+declare class AVCaptureSystemPressureState extends NSObject {
+
+ static alloc(): AVCaptureSystemPressureState; // inherited from NSObject
+
+ static new(): AVCaptureSystemPressureState; // inherited from NSObject
+
+ readonly factors: AVCaptureSystemPressureFactors;
+
+ readonly level: string;
+}
+
declare const enum AVCaptureTorchMode {
Off = 0,
@@ -4587,6 +4633,8 @@ declare class AVContentKeyRequest extends NSObject {
processContentKeyResponseError(error: NSError): void;
respondByRequestingPersistableContentKeyRequest(): void;
+
+ respondByRequestingPersistableContentKeyRequestAndReturnError(): boolean;
}
declare var AVContentKeyRequestProtocolVersionsKey: string;
@@ -4635,7 +4683,7 @@ declare class AVContentKeySession extends NSObject {
static pendingExpiredSessionReportsWithAppIdentifierStorageDirectoryAtURL(appIdentifier: NSData, storageURL: NSURL): NSArray;
- static removePendingExpiredSessionReportsWithAppIdentifierStorageDirectoryAtURL(expiredSessionReports: NSArray, appIdentifier: NSData, storageURL: NSURL): void;
+ static removePendingExpiredSessionReportsWithAppIdentifierStorageDirectoryAtURL(expiredSessionReports: NSArray | NSData[], appIdentifier: NSData, storageURL: NSURL): void;
readonly contentKeyRecipients: NSArray;
@@ -4701,11 +4749,11 @@ declare class AVDateRangeMetadataGroup extends AVMetadataGroup implements NSCopy
readonly startDate: Date;
- constructor(o: { items: NSArray; startDate: Date; endDate: Date; });
+ constructor(o: { items: NSArray | AVMetadataItem[]; startDate: Date; endDate: Date; });
copyWithZone(zone: interop.Pointer | interop.Reference): any;
- initWithItemsStartDateEndDate(items: NSArray, startDate: Date, endDate: Date): this;
+ initWithItemsStartDateEndDate(items: NSArray | AVMetadataItem[], startDate: Date, endDate: Date): this;
mutableCopyWithZone(zone: interop.Pointer | interop.Reference): any;
}
@@ -4905,7 +4953,9 @@ declare const enum AVError {
NoLongerPlayable = -11867,
- NoCompatibleAlternatesForExternalDisplay = -11868
+ NoCompatibleAlternatesForExternalDisplay = -11868,
+
+ NoSourceTrack = -11869
}
declare var AVErrorDeviceKey: string;
@@ -5105,17 +5155,17 @@ declare class AVMediaSelectionGroup extends NSObject implements NSCopying {
static alloc(): AVMediaSelectionGroup; // inherited from NSObject
- static mediaSelectionOptionsFromArrayFilteredAndSortedAccordingToPreferredLanguages(mediaSelectionOptions: NSArray, preferredLanguages: NSArray): NSArray;
+ static mediaSelectionOptionsFromArrayFilteredAndSortedAccordingToPreferredLanguages(mediaSelectionOptions: NSArray | AVMediaSelectionOption[], preferredLanguages: NSArray | string[]): NSArray;
- static mediaSelectionOptionsFromArrayWithLocale(mediaSelectionOptions: NSArray, locale: NSLocale): NSArray;
+ static mediaSelectionOptionsFromArrayWithLocale(mediaSelectionOptions: NSArray | AVMediaSelectionOption[], locale: NSLocale): NSArray;
- static mediaSelectionOptionsFromArrayWithMediaCharacteristics(mediaSelectionOptions: NSArray, mediaCharacteristics: NSArray): NSArray;
+ static mediaSelectionOptionsFromArrayWithMediaCharacteristics(mediaSelectionOptions: NSArray | AVMediaSelectionOption[], mediaCharacteristics: NSArray | string[]): NSArray;
- static mediaSelectionOptionsFromArrayWithoutMediaCharacteristics(mediaSelectionOptions: NSArray, mediaCharacteristics: NSArray): NSArray;
+ static mediaSelectionOptionsFromArrayWithoutMediaCharacteristics(mediaSelectionOptions: NSArray | AVMediaSelectionOption[], mediaCharacteristics: NSArray | string[]): NSArray;
static new(): AVMediaSelectionGroup; // inherited from NSObject
- static playableMediaSelectionOptionsFromArray(mediaSelectionOptions: NSArray): NSArray;
+ static playableMediaSelectionOptionsFromArray(mediaSelectionOptions: NSArray | AVMediaSelectionOption[]): NSArray;
readonly allowsEmptySelection: boolean;
@@ -6055,15 +6105,15 @@ declare class AVMetadataItem extends NSObject implements AVAsynchronousKeyValueL
static metadataItemWithPropertiesOfMetadataItemValueLoadingHandler(metadataItem: AVMetadataItem, handler: (p1: AVMetadataItemValueRequest) => void): AVMetadataItem;
- static metadataItemsFromArrayFilteredAndSortedAccordingToPreferredLanguages(metadataItems: NSArray, preferredLanguages: NSArray): NSArray;
+ static metadataItemsFromArrayFilteredAndSortedAccordingToPreferredLanguages(metadataItems: NSArray | AVMetadataItem[], preferredLanguages: NSArray | string[]): NSArray;
- static metadataItemsFromArrayFilteredByIdentifier(metadataItems: NSArray, identifier: string): NSArray;
+ static metadataItemsFromArrayFilteredByIdentifier(metadataItems: NSArray | AVMetadataItem[], identifier: string): NSArray;
- static metadataItemsFromArrayFilteredByMetadataItemFilter(metadataItems: NSArray, metadataItemFilter: AVMetadataItemFilter): NSArray;
+ static metadataItemsFromArrayFilteredByMetadataItemFilter(metadataItems: NSArray | AVMetadataItem[], metadataItemFilter: AVMetadataItemFilter): NSArray;
- static metadataItemsFromArrayWithKeyKeySpace(metadataItems: NSArray, key: any, keySpace: string): NSArray;
+ static metadataItemsFromArrayWithKeyKeySpace(metadataItems: NSArray | AVMetadataItem[], key: any, keySpace: string): NSArray;
- static metadataItemsFromArrayWithLocale(metadataItems: NSArray, locale: NSLocale): NSArray;
+ static metadataItemsFromArrayWithLocale(metadataItems: NSArray | AVMetadataItem[], locale: NSLocale): NSArray;
static new(): AVMetadataItem; // inherited from NSObject
@@ -6101,7 +6151,7 @@ declare class AVMetadataItem extends NSObject implements AVAsynchronousKeyValueL
copyWithZone(zone: interop.Pointer | interop.Reference): any;
- loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray, handler: () => void): void;
+ loadValuesAsynchronouslyForKeysCompletionHandler(keys: NSArray | string[], handler: () => void): void;
mutableCopyWithZone(zone: interop.Pointer | interop.Reference): any;
@@ -6595,13 +6645,13 @@ declare class AVMutableCompositionTrack extends AVCompositionTrack {
insertTimeRangeOfTrackAtTimeError(timeRange: CMTimeRange, track: AVAssetTrack, startTime: CMTime): boolean;
- insertTimeRangesOfTracksAtTimeError(timeRanges: NSArray, tracks: NSArray, startTime: CMTime): boolean;
+ insertTimeRangesOfTracksAtTimeError(timeRanges: NSArray | NSValue[], tracks: NSArray | AVAssetTrack[], startTime: CMTime): boolean;
removeTimeRange(timeRange: CMTimeRange): void;
scaleTimeRangeToDuration(timeRange: CMTimeRange, duration: CMTime): void;
- validateTrackSegmentsError(trackSegments: NSArray): boolean;
+ validateTrackSegmentsError(trackSegments: NSArray | AVCompositionTrackSegment[]): boolean;
}
declare class AVMutableDateRangeMetadataGroup extends AVDateRangeMetadataGroup {
@@ -6849,11 +6899,13 @@ declare class AVPlayer extends NSObject {
volume: number;
+ static readonly availableHDRModes: AVPlayerHDRMode;
+
constructor(o: { playerItem: AVPlayerItem; });
constructor(o: { URL: NSURL; });
- addBoundaryTimeObserverForTimesQueueUsingBlock(times: NSArray, queue: NSObject, block: () => void): any;
+ addBoundaryTimeObserverForTimesQueueUsingBlock(times: NSArray | NSValue[], queue: NSObject, block: () => void): any;
addPeriodicTimeObserverForIntervalQueueUsingBlock(interval: CMTime, queue: NSObject, block: (p1: CMTime) => void): any;
@@ -6905,6 +6957,17 @@ declare const enum AVPlayerActionAtItemEnd {
None = 2
}
+declare var AVPlayerAvailableHDRModesDidChangeNotification: string;
+
+declare const enum AVPlayerHDRMode {
+
+ HLG = 1,
+
+ HDR10 = 2,
+
+ DolbyVision = 4
+}
+
declare class AVPlayerItem extends NSObject implements NSCopying {
static alloc(): AVPlayerItem; // inherited from NSObject
@@ -6913,7 +6976,7 @@ declare class AVPlayerItem extends NSObject implements NSCopying {
static playerItemWithAsset(asset: AVAsset): AVPlayerItem;
- static playerItemWithAssetAutomaticallyLoadedAssetKeys(asset: AVAsset, automaticallyLoadedAssetKeys: NSArray): AVPlayerItem;
+ static playerItemWithAssetAutomaticallyLoadedAssetKeys(asset: AVAsset, automaticallyLoadedAssetKeys: NSArray | string[]): AVPlayerItem;
static playerItemWithURL(URL: NSURL): AVPlayerItem;
@@ -6993,7 +7056,7 @@ declare class AVPlayerItem extends NSObject implements NSCopying {
constructor(o: { asset: AVAsset; });
- constructor(o: { asset: AVAsset; automaticallyLoadedAssetKeys: NSArray; });
+ constructor(o: { asset: AVAsset; automaticallyLoadedAssetKeys: NSArray | string[]; });
constructor(o: { URL: NSURL; });
@@ -7015,7 +7078,7 @@ declare class AVPlayerItem extends NSObject implements NSCopying {
initWithAsset(asset: AVAsset): this;
- initWithAssetAutomaticallyLoadedAssetKeys(asset: AVAsset, automaticallyLoadedAssetKeys: NSArray): this;
+ initWithAssetAutomaticallyLoadedAssetKeys(asset: AVAsset, automaticallyLoadedAssetKeys: NSArray | string[]): this;
initWithURL(URL: NSURL): this;
@@ -7180,16 +7243,16 @@ declare class AVPlayerItemLegibleOutput extends AVPlayerItemOutput {
textStylingResolution: string;
- constructor(o: { mediaSubtypesForNativeRepresentation: NSArray; });
+ constructor(o: { mediaSubtypesForNativeRepresentation: NSArray | number[]; });
- initWithMediaSubtypesForNativeRepresentation(subtypes: NSArray): this;
+ initWithMediaSubtypesForNativeRepresentation(subtypes: NSArray | number[]): this;
setDelegateQueue(delegate: AVPlayerItemLegibleOutputPushDelegate, delegateQueue: NSObject): void;
}
interface AVPlayerItemLegibleOutputPushDelegate extends AVPlayerItemOutputPushDelegate {
- legibleOutputDidOutputAttributedStringsNativeSampleBuffersForItemTime?(output: AVPlayerItemLegibleOutput, strings: NSArray, nativeSamples: NSArray, itemTime: CMTime): void;
+ legibleOutputDidOutputAttributedStringsNativeSampleBuffersForItemTime?(output: AVPlayerItemLegibleOutput, strings: NSArray | NSAttributedString[], nativeSamples: NSArray | any[], itemTime: CMTime): void;
}
declare var AVPlayerItemLegibleOutputPushDelegate: {
@@ -7217,16 +7280,16 @@ declare class AVPlayerItemMetadataCollector extends AVPlayerItemMediaDataCollect
readonly delegateQueue: NSObject;
- constructor(o: { identifiers: NSArray; classifyingLabels: NSArray; });
+ constructor(o: { identifiers: NSArray | string[]; classifyingLabels: NSArray | string[]; });
- initWithIdentifiersClassifyingLabels(identifiers: NSArray, classifyingLabels: NSArray): this;
+ initWithIdentifiersClassifyingLabels(identifiers: NSArray | string[], classifyingLabels: NSArray | string[]): this;
setDelegateQueue(delegate: AVPlayerItemMetadataCollectorPushDelegate, delegateQueue: NSObject): void;
}
interface AVPlayerItemMetadataCollectorPushDelegate extends NSObjectProtocol {
- metadataCollectorDidCollectDateRangeMetadataGroupsIndexesOfNewGroupsIndexesOfModifiedGroups(metadataCollector: AVPlayerItemMetadataCollector, metadataGroups: NSArray, indexesOfNewGroups: NSIndexSet, indexesOfModifiedGroups: NSIndexSet): void;
+ metadataCollectorDidCollectDateRangeMetadataGroupsIndexesOfNewGroupsIndexesOfModifiedGroups(metadataCollector: AVPlayerItemMetadataCollector, metadataGroups: NSArray | AVDateRangeMetadataGroup[], indexesOfNewGroups: NSIndexSet, indexesOfModifiedGroups: NSIndexSet): void;
}
declare var AVPlayerItemMetadataCollectorPushDelegate: {
@@ -7245,16 +7308,16 @@ declare class AVPlayerItemMetadataOutput extends AVPlayerItemOutput {
readonly delegateQueue: NSObject;
- constructor(o: { identifiers: NSArray; });
+ constructor(o: { identifiers: NSArray | string[]; });
- initWithIdentifiers(identifiers: NSArray): this;
+ initWithIdentifiers(identifiers: NSArray | string[]): this;
setDelegateQueue(delegate: AVPlayerItemMetadataOutputPushDelegate, delegateQueue: NSObject): void;
}
interface AVPlayerItemMetadataOutputPushDelegate extends AVPlayerItemOutputPushDelegate {
- metadataOutputDidOutputTimedMetadataGroupsFromPlayerItemTrack?(output: AVPlayerItemMetadataOutput, groups: NSArray, track: AVPlayerItemTrack): void;
+ metadataOutputDidOutputTimedMetadataGroupsFromPlayerItemTrack?(output: AVPlayerItemMetadataOutput, groups: NSArray | AVTimedMetadataGroup[], track: AVPlayerItemTrack): void;
}
declare var AVPlayerItemMetadataOutputPushDelegate: {
@@ -7418,9 +7481,9 @@ declare class AVPlayerMediaSelectionCriteria extends NSObject {
readonly preferredMediaCharacteristics: NSArray;
- constructor(o: { preferredLanguages: NSArray; preferredMediaCharacteristics: NSArray; });
+ constructor(o: { preferredLanguages: NSArray | string[]; preferredMediaCharacteristics: NSArray | string[]; });
- initWithPreferredLanguagesPreferredMediaCharacteristics(preferredLanguages: NSArray, preferredMediaCharacteristics: NSArray): this;
+ initWithPreferredLanguagesPreferredMediaCharacteristics(preferredLanguages: NSArray | string[], preferredMediaCharacteristics: NSArray