diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index 72fb28c16fdb..e8dcfaca7b98 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,3 +1,7 @@ +## 2.9.2 + +* Refactors for improved testability. + ## 2.9.1 * Refactors native code for improved testability. diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/TestClasses.swift b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/TestClasses.swift new file mode 100644 index 000000000000..3856995b855c --- /dev/null +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/TestClasses.swift @@ -0,0 +1,293 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation +import Testing +import video_player_avfoundation + +#if os(iOS) + import Flutter + import UIKit +#else + import FlutterMacOS +#endif + +/// An AVPlayer subclass that records method call parameters for inspection. +// TODO(stuartmorgan): Replace with a protocol like the other classes. +@MainActor final class InspectableAVPlayer: AVPlayer { + private(set) nonisolated(unsafe) var beforeTolerance: NSNumber? + private(set) nonisolated(unsafe) var afterTolerance: NSNumber? + private(set) nonisolated(unsafe) var lastSeekTime: CMTime = .invalid + + override func seek( + to time: CMTime, + toleranceBefore: CMTime, + toleranceAfter: CMTime, + completionHandler: @escaping @Sendable (Bool) -> Void + ) { + beforeTolerance = NSNumber(value: toleranceBefore.value) + afterTolerance = NSNumber(value: toleranceAfter.value) + lastSeekTime = time + super.seek( + to: time, toleranceBefore: toleranceBefore, toleranceAfter: toleranceAfter, + completionHandler: completionHandler) + } +} + +final class TestAsset: NSObject, FVPAVAsset { + let duration: CMTime + let tracks: [AVAssetTrack]? + + var loadedTracksAsynchronously = false + + init(duration: CMTime = CMTime.zero, tracks: [AVAssetTrack]? = nil) { + self.duration = duration + self.tracks = tracks + super.init() + } + + func statusOfValue(forKey key: String, error outError: NSErrorPointer) -> AVKeyValueStatus { + return tracks == nil ? .loading : .loaded + } + + func loadValuesAsynchronously(forKeys keys: [String], completionHandler handler: (() -> Void)?) { + handler?() + } + + @available(macOS 12.0, iOS 15.0, *) + func loadTracks( + withMediaType mediaType: AVMediaType, + completionHandler: @escaping ([AVAssetTrack]?, Error?) -> Void + ) { + loadedTracksAsynchronously = true + completionHandler(tracks, nil) + } + + func tracks(withMediaType mediaType: AVMediaType) -> [AVAssetTrack] { + return tracks ?? [] + } +} + +final class StubPlayerItem: NSObject, FVPAVPlayerItem { + let asset: FVPAVAsset + var videoComposition: AVVideoComposition? + + init(asset: FVPAVAsset = TestAsset()) { + self.asset = asset + super.init() + } +} + +final class StubBinaryMessenger: NSObject, FlutterBinaryMessenger { + func send(onChannel channel: String, message: Data?) {} + func send( + onChannel channel: String, + message: Data?, + binaryReply callback: FlutterBinaryReply? = nil + ) {} + func setMessageHandlerOnChannel( + _ channel: String, + binaryMessageHandler handler: FlutterBinaryMessageHandler? 
= nil
+  ) -> FlutterBinaryMessengerConnection {
+    return 0
+  }
+  func cleanUpConnection(_ connection: FlutterBinaryMessengerConnection) {}
+}
+
+final class TestTextureRegistry: NSObject, FlutterTextureRegistry {
+  private(set) var registeredTexture = false
+  private(set) var unregisteredTexture = false
+  private(set) var textureFrameAvailableCount = 0
+
+  func register(_ texture: FlutterTexture) -> Int64 {
+    registeredTexture = true
+    return 1
+  }
+
+  func unregisterTexture(_ textureId: Int64) {
+    if textureId != 1 {
+      Issue.record("Unregistering texture with wrong ID")
+    }
+    unregisteredTexture = true
+  }
+
+  func textureFrameAvailable(_ textureId: Int64) {
+    if textureId != 1 {
+      Issue.record("Texture frame available with wrong ID")
+    }
+    textureFrameAvailableCount += 1
+  }
+}
+
+final class StubViewProvider: NSObject, FVPViewProvider {
+  #if os(iOS)
+    var viewController: UIViewController?
+    init(viewController: UIViewController? = nil) {
+      self.viewController = viewController
+      super.init()
+    }
+  #else
+    var view: NSView?
+    init(view: NSView? = nil) {
+      self.view = view
+      super.init()
+    }
+  #endif
+}
+
+final class StubAssetProvider: NSObject, FVPAssetProvider {
+  func lookupKey(forAsset asset: String) -> String? {
+    return asset
+  }
+
+  func lookupKey(forAsset asset: String, fromPackage package: String) -> String? {
+    return asset
+  }
+}
+
+final class TestPixelBufferSource: NSObject, FVPPixelBufferSource {
+  var pixelBuffer: CVPixelBuffer?
+  let videoOutput: AVPlayerItemVideoOutput
+
+  override init() {
+    videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: [
+      kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
+      kCVPixelBufferIOSurfacePropertiesKey as String: [:] as [String: String],
+    ])
+    super.init()
+  }
+
+  func itemTime(forHostTime hostTimeInSeconds: CFTimeInterval) -> CMTime {
+    return CMTimeMakeWithSeconds(hostTimeInSeconds, preferredTimescale: 1000)
+  }
+
+  func hasNewPixelBuffer(forItemTime itemTime: CMTime) -> Bool {
+    return pixelBuffer != nil
+  }
+
+  func copyPixelBuffer(
+    forItemTime itemTime: CMTime,
+    itemTimeForDisplay: UnsafeMutablePointer<CMTime>?
+  ) -> CVPixelBuffer? {
+    let buffer = pixelBuffer
+    // Ownership is transferred to the caller.
+    pixelBuffer = nil
+    return buffer
+  }
+}
+
+#if os(iOS)
+  final class TestAudioSession: NSObject, FVPAVAudioSession {
+    var category: AVAudioSession.Category = .ambient
+    var categoryOptions: AVAudioSession.CategoryOptions = []
+    private(set) var setCategoryCalled = false
+
+    func setCategory(
+      _ category: AVAudioSession.Category,
+      with options: AVAudioSession.CategoryOptions
+    ) throws {
+      setCategoryCalled = true
+      self.category = category
+      self.categoryOptions = options
+    }
+  }
+#endif
+
+final class StubFVPAVFactory: NSObject, FVPAVFactory {
+  let player: AVPlayer
+  let playerItem: FVPAVPlayerItem
+  let pixelBufferSource: FVPPixelBufferSource?
+  #if os(iOS)
+    var audioSession: FVPAVAudioSession
+  #endif
+
+  init(
+    player: AVPlayer? = nil,
+    playerItem: FVPAVPlayerItem? = nil,
+    pixelBufferSource: FVPPixelBufferSource? = nil
+  ) {
+    let dummyURL = URL(string: "https://flutter.dev")!
+    self.player =
+      player
+      ?? AVPlayer(playerItem: AVPlayerItem(url: dummyURL))
+    self.playerItem = playerItem ?? StubPlayerItem()
+    self.pixelBufferSource = pixelBufferSource
+    #if os(iOS)
+      self.audioSession = TestAudioSession()
+    #endif
+    super.init()
+  }
+
+  func urlAsset(with url: URL, options: [String: Any]?) -> FVPAVAsset {
+    return playerItem.asset
+  }
+
+  func playerItem(with asset: FVPAVAsset) -> FVPAVPlayerItem {
+    return playerItem
+  }
+
+  func player(with playerItem: FVPAVPlayerItem) -> AVPlayer {
+    return self.player
+  }
+
+  func videoOutput(pixelBufferAttributes attributes: [String: Any]) -> FVPPixelBufferSource {
+    return pixelBufferSource ?? TestPixelBufferSource()
+  }
+
+  #if os(iOS)
+    func sharedAudioSession() -> FVPAVAudioSession {
+      return audioSession
+    }
+  #endif
+}
+
+final class StubFVPDisplayLink: NSObject, FVPDisplayLink {
+  var running: Bool = false
+  var duration: CFTimeInterval {
+    return 1.0 / 60.0
+  }
+}
+
+final class StubFVPDisplayLinkFactory: NSObject, FVPDisplayLinkFactory {
+  let displayLink = StubFVPDisplayLink()
+  var fireDisplayLink: (() -> Void)?
+
+  func displayLink(
+    with viewProvider: FVPViewProvider,
+    callback: @escaping () -> Void
+  ) -> FVPDisplayLink {
+    fireDisplayLink = callback
+    return displayLink
+  }
+}
+
+final class StubEventListener: NSObject, FVPVideoEventListener {
+  var onInitialized: (() -> Void)?
+  private(set) var initializationDuration: Int64 = 0
+  private(set) var initializationSize: CGSize = .zero
+
+  init(onInitialized: (() -> Void)? = nil) {
+    self.onInitialized = onInitialized
+    super.init()
+  }
+
+  func videoPlayerDidComplete() {}
+  func videoPlayerDidEndBuffering() {}
+  func videoPlayerDidError(withMessage errorMessage: String) {}
+  func videoPlayerDidInitialize(withDuration duration: Int64, size: CGSize) {
+    onInitialized?()
+    initializationDuration = duration
+    initializationSize = size
+  }
+  func videoPlayerDidSetPlaying(_ playing: Bool) {}
+  func videoPlayerDidStartBuffering() {}
+  func videoPlayerDidUpdateBufferRegions(_ regions: [[NSNumber]]!) {}
+  func videoPlayerWasDisposed() {}
+}
+
+final class StubTexture: NSObject, FlutterTexture {
+  func copyPixelBuffer() -> Unmanaged<CVPixelBuffer>? {
+    return nil
+  }
+}
diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m
deleted file mode 100644
index 19e3a70d27fc..000000000000
--- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m
+++ /dev/null
@@ -1,1426 +0,0 @@
-// Copyright 2013 The Flutter Authors
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
- -@import AVFoundation; -@import video_player_avfoundation; -@import XCTest; - -#if TARGET_OS_IOS -@interface FakeAVAssetTrack : AVAssetTrack -@property(readonly, nonatomic) CGAffineTransform preferredTransform; -@property(readonly, nonatomic) CGSize naturalSize; -@property(readonly, nonatomic) UIImageOrientation orientation; -- (instancetype)initWithOrientation:(UIImageOrientation)orientation; -@end - -@implementation FakeAVAssetTrack - -- (instancetype)initWithOrientation:(UIImageOrientation)orientation { - _orientation = orientation; - _naturalSize = CGSizeMake(800, 600); - return self; -} - -- (CGAffineTransform)preferredTransform { - switch (_orientation) { - case UIImageOrientationUp: - return CGAffineTransformMake(1, 0, 0, 1, 0, 0); - case UIImageOrientationDown: - return CGAffineTransformMake(-1, 0, 0, -1, 0, 0); - case UIImageOrientationLeft: - return CGAffineTransformMake(0, -1, 1, 0, 0, 0); - case UIImageOrientationRight: - return CGAffineTransformMake(0, 1, -1, 0, 0, 0); - case UIImageOrientationUpMirrored: - return CGAffineTransformMake(-1, 0, 0, 1, 0, 0); - case UIImageOrientationDownMirrored: - return CGAffineTransformMake(1, 0, 0, -1, 0, 0); - case UIImageOrientationLeftMirrored: - return CGAffineTransformMake(0, -1, -1, 0, 0, 0); - case UIImageOrientationRightMirrored: - return CGAffineTransformMake(0, 1, 1, 0, 0, 0); - } -} - -@end -#endif - -@interface VideoPlayerTests : XCTestCase -@end - -/// An AVPlayer subclass that records method call parameters for inspection. -// TODO(stuartmorgan): Replace with a protocol like the other classes. -@interface InspectableAVPlayer : AVPlayer -@property(readonly, nonatomic) NSNumber *beforeTolerance; -@property(readonly, nonatomic) NSNumber *afterTolerance; -@property(readonly, assign) CMTime lastSeekTime; -@end - -@implementation InspectableAVPlayer - -- (void)seekToTime:(CMTime)time - toleranceBefore:(CMTime)toleranceBefore - toleranceAfter:(CMTime)toleranceAfter - completionHandler:(void (^)(BOOL finished))completionHandler { - _beforeTolerance = [NSNumber numberWithLong:toleranceBefore.value]; - _afterTolerance = [NSNumber numberWithLong:toleranceAfter.value]; - _lastSeekTime = time; - [super seekToTime:time - toleranceBefore:toleranceBefore - toleranceAfter:toleranceAfter - completionHandler:completionHandler]; -} - -@end - -@interface TestAsset : NSObject -@property(nonatomic, readonly) CMTime duration; -@property(nonatomic, nullable, readonly) NSArray *tracks; - -@property(nonatomic, assign) BOOL loadedTracksAsynchronously; -@end - -@implementation TestAsset -- (instancetype)init { - return [self initWithDuration:kCMTimeZero tracks:nil]; -} - -- (instancetype)initWithDuration:(CMTime)duration - tracks:(nullable NSArray *)tracks { - self = [super init]; - _duration = duration; - _tracks = tracks; - return self; -} - -- (AVKeyValueStatus)statusOfValueForKey:(NSString *)key - error:(NSError *_Nullable *_Nullable)outError { - return self.tracks == nil ? 
AVKeyValueStatusLoading : AVKeyValueStatusLoaded; -} - -- (void)loadValuesAsynchronouslyForKeys:(NSArray *)keys - completionHandler:(nullable void (^NS_SWIFT_SENDABLE)(void))handler { - if (handler) { - handler(); - } -} - -- (void)loadTracksWithMediaType:(AVMediaType)mediaType - completionHandler:(void (^NS_SWIFT_SENDABLE)(NSArray *_Nullable, - NSError *_Nullable))completionHandler - API_AVAILABLE(macos(12.0), ios(15.0)) { - self.loadedTracksAsynchronously = YES; - completionHandler(_tracks, nil); -} - -- (NSArray *)tracksWithMediaType:(AVMediaType)mediaType - API_DEPRECATED("Use loadTracksWithMediaType:completionHandler: instead", macos(10.7, 15.0), - ios(4.0, 18.0)) { - return _tracks ?: @[]; -} -@end - -@interface StubPlayerItem : NSObject -@property(nonatomic, readonly) NSObject *asset; -@property(nonatomic, copy, nullable) AVVideoComposition *videoComposition; -@end - -@implementation StubPlayerItem -- (instancetype)init { - return [self initWithAsset:[[TestAsset alloc] init]]; -} - -- (instancetype)initWithAsset:(NSObject *)asset { - self = [super init]; - _asset = asset; - return self; -} -@end - -@interface StubBinaryMessenger : NSObject -@end - -@implementation StubBinaryMessenger - -- (void)sendOnChannel:(NSString *)channel message:(NSData *_Nullable)message { -} -- (void)sendOnChannel:(NSString *)channel - message:(NSData *_Nullable)message - binaryReply:(FlutterBinaryReply _Nullable)callback { -} -- (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(NSString *)channel - binaryMessageHandler: - (FlutterBinaryMessageHandler _Nullable)handler { - return 0; -} -- (void)cleanUpConnection:(FlutterBinaryMessengerConnection)connection { -} -@end - -@interface TestTextureRegistry : NSObject -@property(nonatomic, assign) BOOL registeredTexture; -@property(nonatomic, assign) BOOL unregisteredTexture; -@property(nonatomic, assign) int textureFrameAvailableCount; -@end - -@implementation TestTextureRegistry -- (int64_t)registerTexture:(NSObject *)texture { - self.registeredTexture = true; - return 1; -} - -- (void)unregisterTexture:(int64_t)textureId { - if (textureId != 1) { - XCTFail(@"Unregistering texture with wrong ID"); - } - self.unregisteredTexture = true; -} - -- (void)textureFrameAvailable:(int64_t)textureId { - if (textureId != 1) { - XCTFail(@"Texture frame available with wrong ID"); - } - self.textureFrameAvailableCount++; -} -@end - -@interface StubViewProvider : NSObject -#if TARGET_OS_IOS -- (instancetype)initWithViewController:(UIViewController *)viewController; -@property(nonatomic, nullable) UIViewController *viewController; -#else -- (instancetype)initWithView:(NSView *)view; -@property(nonatomic, nullable) NSView *view; -#endif -@end - -@implementation StubViewProvider -#if TARGET_OS_IOS -- (instancetype)initWithViewController:(UIViewController *)viewController { - self = [super init]; - _viewController = viewController; - return self; -} -#else -- (instancetype)initWithView:(NSView *)view { - self = [super init]; - _view = view; - return self; -} -#endif -@end - -@interface StubAssetProvider : NSObject -@end - -@implementation StubAssetProvider -- (NSString *)lookupKeyForAsset:(NSString *)asset { - return asset; -} - -- (NSString *)lookupKeyForAsset:(NSString *)asset fromPackage:(NSString *)package { - return asset; -} -@end - -@interface TestPixelBufferSource : NSObject -@property(nonatomic) CVPixelBufferRef pixelBuffer; -@property(nonatomic, readonly) AVPlayerItemVideoOutput *videoOutput; -@end - -@implementation TestPixelBufferSource -- 
(instancetype)init { - self = [super init]; - // Create an arbitrary video output to for attaching to actual AVFoundation - // objects. The attributes don't matter since this isn't used to implement - // the methods called by the plugin. - _videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:@{ - (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), - (id)kCVPixelBufferIOSurfacePropertiesKey : @{} - }]; - return self; -} - -- (void)dealloc { - CVPixelBufferRelease(_pixelBuffer); -} - -- (void)setPixelBuffer:(CVPixelBufferRef)pixelBuffer { - CVPixelBufferRelease(_pixelBuffer); - _pixelBuffer = CVPixelBufferRetain(pixelBuffer); -} - -- (CMTime)itemTimeForHostTime:(CFTimeInterval)hostTimeInSeconds { - return CMTimeMakeWithSeconds(hostTimeInSeconds, 1000); -} - -- (BOOL)hasNewPixelBufferForItemTime:(CMTime)itemTime { - return _pixelBuffer != NULL; -} - -- (nullable CVPixelBufferRef)copyPixelBufferForItemTime:(CMTime)itemTime - itemTimeForDisplay:(nullable CMTime *)outItemTimeForDisplay { - CVPixelBufferRef pixelBuffer = _pixelBuffer; - // Ownership is transferred to the caller. - _pixelBuffer = NULL; - return pixelBuffer; -} -@end - -#if TARGET_OS_IOS -@interface TestAudioSession : NSObject -@property(nonatomic, readwrite) AVAudioSessionCategory category; -@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions; - -/// Tracks whether setCategory:withOptions:error: has been called. -@property(nonatomic, assign) BOOL setCategoryCalled; -@end - -@implementation TestAudioSession -- (BOOL)setCategory:(AVAudioSessionCategory)category - withOptions:(AVAudioSessionCategoryOptions)options - error:(NSError **)outError { - self.setCategoryCalled = YES; - self.category = category; - self.categoryOptions = options; - return YES; -} -@end -#endif - -@interface StubFVPAVFactory : NSObject - -@property(nonatomic, strong) AVPlayer *player; -@property(nonatomic, strong) NSObject *playerItem; -@property(nonatomic, strong) NSObject *pixelBufferSource; -#if TARGET_OS_IOS -@property(nonatomic, strong) NSObject *audioSession; -#endif - -@end - -@implementation StubFVPAVFactory - -// Creates a factory that returns the given items. Any items that are nil will instead return -// a real object just as the non-test implementation would. -- (instancetype)initWithPlayer:(nullable AVPlayer *)player - playerItem:(nullable NSObject *)playerItem - pixelBufferSource:(nullable NSObject *)pixelBufferSource { - self = [super init]; - // Create a player with a dummy item so that the player is valid, since most tests won't work - // without a valid player. - // TODO(stuartmorgan): Introduce a protocol for AVPlayer and use a stub here instead. 
- NSURL *dummyURL = [NSURL URLWithString:@""]; - _player = - player ?: [[AVPlayer alloc] initWithPlayerItem:[AVPlayerItem playerItemWithURL:dummyURL]]; - _playerItem = playerItem ?: [[StubPlayerItem alloc] init]; - _pixelBufferSource = pixelBufferSource; -#if TARGET_OS_IOS - _audioSession = [[TestAudioSession alloc] init]; -#endif - return self; -} - -- (NSObject *)URLAssetWithURL:(NSURL *)URL - options:(nullable NSDictionary *)options { - return self.playerItem.asset; -} - -- (NSObject *)playerItemWithAsset:(NSObject *)asset { - return self.playerItem; -} - -- (AVPlayer *)playerWithPlayerItem:(NSObject *)playerItem { - return self.player; -} - -- (NSObject *)videoOutputWithPixelBufferAttributes: - (NSDictionary *)attributes { - return self.pixelBufferSource ?: [[TestPixelBufferSource alloc] init]; -} - -#if TARGET_OS_IOS -- (NSObject *)sharedAudioSession { - return self.audioSession; -} -#endif - -@end - -#pragma mark - - -@interface StubFVPDisplayLink : NSObject -@property(nonatomic, assign) BOOL running; -@end - -@implementation StubFVPDisplayLink -- (CFTimeInterval)duration { - return 1.0 / 60.0; -} -@end - -/** Test implementation of FVPDisplayLinkFactory that returns a stub display link instance. */ -@interface StubFVPDisplayLinkFactory : NSObject -/** This display link to return. */ -@property(nonatomic, strong) StubFVPDisplayLink *displayLink; -@property(nonatomic, copy) void (^fireDisplayLink)(void); -@end - -@implementation StubFVPDisplayLinkFactory -- (instancetype)init { - self = [super init]; - _displayLink = [[StubFVPDisplayLink alloc] init]; - return self; -} -- (NSObject *)displayLinkWithViewProvider:(NSObject *)viewProvider - callback:(void (^)(void))callback { - self.fireDisplayLink = callback; - return self.displayLink; -} - -@end - -#pragma mark - - -@interface StubEventListener : NSObject - -@property(nonatomic) XCTestExpectation *initializationExpectation; -@property(nonatomic) int64_t initializationDuration; -@property(nonatomic) CGSize initializationSize; - -- (instancetype)initWithInitializationExpectation:(XCTestExpectation *)expectation; - -@end - -@implementation StubEventListener - -- (instancetype)initWithInitializationExpectation:(XCTestExpectation *)expectation { - self = [super init]; - _initializationExpectation = expectation; - return self; -} - -- (void)videoPlayerDidComplete { -} - -- (void)videoPlayerDidEndBuffering { -} - -- (void)videoPlayerDidErrorWithMessage:(NSString *)errorMessage { -} - -- (void)videoPlayerDidInitializeWithDuration:(int64_t)duration size:(CGSize)size { - [self.initializationExpectation fulfill]; - self.initializationDuration = duration; - self.initializationSize = size; -} - -- (void)videoPlayerDidSetPlaying:(BOOL)playing { -} - -- (void)videoPlayerDidStartBuffering { -} - -- (void)videoPlayerDidUpdateBufferRegions:(NSArray *> *)regions { -} - -- (void)videoPlayerWasDisposed { -} - -@end - -#pragma mark - - -@implementation VideoPlayerTests - -- (void)testBlankVideoBugWithEncryptedVideoStreamAndInvertedAspectRatioBugForSomeVideoStream { - // This is to fix 2 bugs: 1. blank video for encrypted video streams on iOS 16 - // (https://github.com/flutter/flutter/issues/111457) and 2. swapped width and height for some - // video streams (not just iOS 16). (https://github.com/flutter/flutter/issues/109116). An - // invisible AVPlayerLayer is used to overwrite the protection of pixel buffers in those streams - // for issue #1, and restore the correct width and height for issue #2. 
-#if TARGET_OS_OSX - NSView *view = [[NSView alloc] initWithFrame:NSMakeRect(0, 0, 10, 10)]; - view.wantsLayer = true; - id viewProvider = [[StubViewProvider alloc] initWithView:view]; -#else - UIView *view = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 10, 10)]; - UIViewController *viewController = [[UIViewController alloc] init]; - viewController.view = view; - id viewProvider = - [[StubViewProvider alloc] initWithViewController:viewController]; -#endif - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil] - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:viewProvider - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - - XCTAssertNotNil(player.playerLayer, @"AVPlayerLayer should be present."); - XCTAssertEqual(player.playerLayer.superlayer, view.layer, - @"AVPlayerLayer should be added on screen."); -} - -- (void)testPlayerForPlatformViewDoesNotRegisterTexture { - TestTextureRegistry *textureRegistry = [[TestTextureRegistry alloc] init]; - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:textureRegistry - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *initializationError; - [videoPlayerPlugin initialize:&initializationError]; - XCTAssertNil(initializationError); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" - httpHeaders:@{}]; - FlutterError *createError; - [videoPlayerPlugin createPlatformViewPlayerWithOptions:create error:&createError]; - - XCTAssertFalse(textureRegistry.registeredTexture); -} - -- (void)testSeekToWhilePausedStartsDisplayLinkTemporarily { - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - TestPixelBufferSource *mockVideoOutput = [[TestPixelBufferSource alloc] init]; - // Display link and frame updater wire-up is currently done in FVPVideoPlayerPlugin, so create - // the player via the plugin instead of directly to include that logic in the test. 
- FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:mockVideoOutput] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *initializationError; - [videoPlayerPlugin initialize:&initializationError]; - XCTAssertNil(initializationError); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" - httpHeaders:@{}]; - FlutterError *createError; - FVPTexturePlayerIds *identifiers = - [videoPlayerPlugin createTexturePlayerWithOptions:create error:&createError]; - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - - // Ensure that the video playback is paused before seeking. - FlutterError *pauseError; - [player pauseWithError:&pauseError]; - - XCTestExpectation *seekExpectation = [self expectationWithDescription:@"seekTo completes"]; - [player seekTo:1234 - completion:^(FlutterError *_Nullable error) { - [seekExpectation fulfill]; - }]; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Seeking to a new position should start the display link temporarily. - XCTAssertTrue(stubDisplayLinkFactory.displayLink.running); - - // Simulate a buffer being available. - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); - mockVideoOutput.pixelBuffer = bufferRef; - CVPixelBufferRelease(bufferRef); - // Simulate a callback from the engine to request a new frame. - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); - // Since a frame was found, and the video is paused, the display link should be paused again. - XCTAssertFalse(stubDisplayLinkFactory.displayLink.running); -} - -- (void)testInitStartsDisplayLinkTemporarily { - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - TestPixelBufferSource *mockVideoOutput = [[TestPixelBufferSource alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:mockVideoOutput] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *initializationError; - [videoPlayerPlugin initialize:&initializationError]; - XCTAssertNil(initializationError); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" - httpHeaders:@{}]; - FlutterError *createError; - FVPTexturePlayerIds *identifiers = - [videoPlayerPlugin createTexturePlayerWithOptions:create error:&createError]; - - // Init should start the display link temporarily. - XCTAssertTrue(stubDisplayLinkFactory.displayLink.running); - - // Simulate a buffer being available. 
- CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); - mockVideoOutput.pixelBuffer = bufferRef; - CVPixelBufferRelease(bufferRef); - // Simulate a callback from the engine to request a new frame. - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); - // Since a frame was found, and the video is paused, the display link should be paused again. - XCTAssertFalse(stubDisplayLinkFactory.displayLink.running); -} - -- (void)testSeekToWhilePlayingDoesNotStopDisplayLink { - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - TestPixelBufferSource *mockVideoOutput = [[TestPixelBufferSource alloc] init]; - // Display link and frame updater wire-up is currently done in FVPVideoPlayerPlugin, so create - // the player via the plugin instead of directly to include that logic in the test. - FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:mockVideoOutput] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *initializationError; - [videoPlayerPlugin initialize:&initializationError]; - XCTAssertNil(initializationError); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" - httpHeaders:@{}]; - FlutterError *createError; - FVPTexturePlayerIds *identifiers = - [videoPlayerPlugin createTexturePlayerWithOptions:create error:&createError]; - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - - // Ensure that the video is playing before seeking. - FlutterError *playError; - [player playWithError:&playError]; - - XCTestExpectation *seekExpectation = [self expectationWithDescription:@"seekTo completes"]; - [player seekTo:1234 - completion:^(FlutterError *_Nullable error) { - [seekExpectation fulfill]; - }]; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - XCTAssertTrue(stubDisplayLinkFactory.displayLink.running); - - // Simulate a buffer being available. - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); - mockVideoOutput.pixelBuffer = bufferRef; - CVPixelBufferRelease(bufferRef); - // Simulate a callback from the engine to request a new frame. - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); - // Since the video was playing, the display link should not be paused after getting a buffer. - XCTAssertTrue(stubDisplayLinkFactory.displayLink.running); -} - -- (void)testPauseWhileWaitingForFrameDoesNotStopDisplayLink { - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - // Display link and frame updater wire-up is currently done in FVPVideoPlayerPlugin, so create - // the player via the plugin instead of directly to include that logic in the test. 
- FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *initializationError; - [videoPlayerPlugin initialize:&initializationError]; - XCTAssertNil(initializationError); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" - httpHeaders:@{}]; - FlutterError *createError; - FVPTexturePlayerIds *identifiers = - [videoPlayerPlugin createTexturePlayerWithOptions:create error:&createError]; - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - - // Run a play/pause cycle to force the pause codepath to run completely. - FlutterError *playPauseError; - [player playWithError:&playPauseError]; - [player pauseWithError:&playPauseError]; - - // Since a buffer hasn't been available yet, the pause should not have stopped the display link. - XCTAssertTrue(stubDisplayLinkFactory.displayLink.running); -} - -- (void)testDeregistersFromPlayer { - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil] - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - - [player disposeWithError:&error]; - XCTAssertEqual(videoPlayerPlugin.playersByIdentifier.count, 0); - XCTAssertNil(error); -} - -- (void)testBufferingStateFromPlayer { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:realObjectFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - AVPlayer *avPlayer = player.player; - [avPlayer play]; - - // TODO(stuartmorgan): Update this test 
to instead use a mock listener, and add separate unit - // tests of FVPEventBridge. - [(NSObject *)player.eventListener - onListenWithArguments:nil - eventSink:^(NSDictionary *event) { - if ([event[@"event"] isEqualToString:@"bufferingEnd"]) { - XCTAssertTrue(avPlayer.currentItem.isPlaybackLikelyToKeepUp); - } - - if ([event[@"event"] isEqualToString:@"bufferingStart"]) { - XCTAssertFalse(avPlayer.currentItem.isPlaybackLikelyToKeepUp); - } - }]; - XCTestExpectation *bufferingStateExpectation = - [self expectationWithDescription:@"bufferingState"]; - NSTimeInterval timeout = 10; - dispatch_time_t delay = dispatch_time(DISPATCH_TIME_NOW, timeout * NSEC_PER_SEC); - dispatch_after(delay, dispatch_get_main_queue(), ^{ - [bufferingStateExpectation fulfill]; - }); - [self waitForExpectationsWithTimeout:timeout + 1 handler:nil]; -} - -- (void)testVideoControls { - StubEventListener *eventListener = - [self sanityTestURI:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"]; - XCTAssertEqual(eventListener.initializationSize.height, 720); - XCTAssertEqual(eventListener.initializationSize.width, 1280); - XCTAssertEqualWithAccuracy(eventListener.initializationDuration, 4000, 200); -} - -- (void)testAudioControls { - StubEventListener *eventListener = [self - sanityTestURI:@"https://flutter.github.io/assets-for-api-docs/assets/audio/rooster.mp3"]; - XCTAssertEqual(eventListener.initializationSize.height, 0); - XCTAssertEqual(eventListener.initializationSize.width, 0); - // Perfect precision not guaranteed. - XCTAssertEqualWithAccuracy(eventListener.initializationDuration, 5400, 200); -} - -- (void)testHLSControls { - StubEventListener *eventListener = [self - sanityTestURI:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8"]; - XCTAssertEqual(eventListener.initializationSize.height, 720); - XCTAssertEqual(eventListener.initializationSize.width, 1280); - XCTAssertEqualWithAccuracy(eventListener.initializationDuration, 4000, 200); -} - -- (void)testAudioOnlyHLSControls { - XCTSkip(@"Flaky; see https://github.com/flutter/flutter/issues/164381"); - - StubEventListener *eventListener = - [self sanityTestURI:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/" - @"bee_audio_only.m3u8"]; - XCTAssertEqual(eventListener.initializationSize.height, 0); - XCTAssertEqual(eventListener.initializationSize.width, 0); - XCTAssertEqualWithAccuracy(eventListener.initializationDuration, 4000, 200); -} - -#if TARGET_OS_IOS -- (void)testTransformFix { - [self validateTransformFixForOrientation:UIImageOrientationUp]; - [self validateTransformFixForOrientation:UIImageOrientationDown]; - [self validateTransformFixForOrientation:UIImageOrientationLeft]; - [self validateTransformFixForOrientation:UIImageOrientationRight]; - [self validateTransformFixForOrientation:UIImageOrientationUpMirrored]; - [self validateTransformFixForOrientation:UIImageOrientationDownMirrored]; - [self validateTransformFixForOrientation:UIImageOrientationLeftMirrored]; - [self validateTransformFixForOrientation:UIImageOrientationRightMirrored]; -} -#endif - -- (void)testSeekToleranceWhenNotSeekingToEnd { - InspectableAVPlayer *inspectableAVPlayer = [[InspectableAVPlayer alloc] init]; - StubFVPAVFactory *stubAVFactory = [[StubFVPAVFactory alloc] initWithPlayer:inspectableAVPlayer - playerItem:nil - pixelBufferSource:nil]; - FVPVideoPlayer *player = - [[FVPVideoPlayer alloc] initWithPlayerItem:[[StubPlayerItem alloc] init] - avFactory:stubAVFactory - viewProvider:[[StubViewProvider alloc] init]]; - 
NSObject *listener = [[StubEventListener alloc] init]; - player.eventListener = listener; - - XCTestExpectation *seekExpectation = - [self expectationWithDescription:@"seekTo has zero tolerance when seeking not to end"]; - [player seekTo:1234 - completion:^(FlutterError *_Nullable error) { - [seekExpectation fulfill]; - }]; - - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - XCTAssertEqual([inspectableAVPlayer.beforeTolerance intValue], 0); - XCTAssertEqual([inspectableAVPlayer.afterTolerance intValue], 0); -} - -- (void)testSeekToleranceWhenSeekingToEnd { - InspectableAVPlayer *inspectableAVPlayer = [[InspectableAVPlayer alloc] init]; - StubFVPAVFactory *stubAVFactory = [[StubFVPAVFactory alloc] initWithPlayer:inspectableAVPlayer - playerItem:nil - pixelBufferSource:nil]; - FVPVideoPlayer *player = - [[FVPVideoPlayer alloc] initWithPlayerItem:[[StubPlayerItem alloc] init] - avFactory:stubAVFactory - viewProvider:[[StubViewProvider alloc] init]]; - NSObject *listener = [[StubEventListener alloc] init]; - player.eventListener = listener; - - XCTestExpectation *seekExpectation = - [self expectationWithDescription:@"seekTo has non-zero tolerance when seeking to end"]; - // The duration of this video is "0" due to the non standard initiliatazion process. - [player seekTo:0 - completion:^(FlutterError *_Nullable error) { - [seekExpectation fulfill]; - }]; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - XCTAssertGreaterThan([inspectableAVPlayer.beforeTolerance intValue], 0); - XCTAssertGreaterThan([inspectableAVPlayer.afterTolerance intValue], 0); -} - -/// Sanity checks a video player playing the given URL with the actual AVPlayer. This is essentially -/// a mini integration test of the player component. -/// -/// Returns the stub event listener to allow tests to inspect the call state. -- (StubEventListener *)sanityTestURI:(NSString *)testURI { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - NSURL *testURL = [NSURL URLWithString:testURI]; - XCTAssertNotNil(testURL); - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:testURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - XCTAssertNotNil(player); - - XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; - StubEventListener *listener = - [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; - player.eventListener = listener; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Starts paused. - AVPlayer *avPlayer = player.player; - XCTAssertEqual(avPlayer.rate, 0); - XCTAssertEqual(avPlayer.volume, 1); - XCTAssertEqual(avPlayer.timeControlStatus, AVPlayerTimeControlStatusPaused); - - // Change playback speed. - FlutterError *error; - [player setPlaybackSpeed:2 error:&error]; - XCTAssertNil(error); - [player playWithError:&error]; - XCTAssertNil(error); - XCTAssertEqual(avPlayer.rate, 2); - XCTAssertEqual(avPlayer.timeControlStatus, AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate); - - // Volume - [player setVolume:0.1 error:&error]; - XCTAssertNil(error); - XCTAssertEqual(avPlayer.volume, 0.1f); - - return listener; -} - -// Checks whether [AVPlayer rate] KVO observations are correctly detached. -// - https://github.com/flutter/flutter/issues/124937 -// -// Failing to de-register results in a crash in [AVPlayer willChangeValueForKey:]. 
-- (void)testDoesNotCrashOnRateObservationAfterDisposal { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - - AVPlayer *avPlayer = nil; - __weak FVPVideoPlayer *weakPlayer = nil; - - // Autoreleasepool is needed to simulate conditions of FVPVideoPlayer deallocation. - @autoreleasepool { - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:realObjectFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - - FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - weakPlayer = player; - avPlayer = player.player; - - [player disposeWithError:&error]; - XCTAssertNil(error); - } - - // [FVPVideoPlayerPlugin dispose:error:] selector is dispatching the [FVPVideoPlayer dispose] call - // with a 1-second delay keeping a strong reference to the player. The polling ensures the player - // was truly deallocated. -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Warc-repeated-use-of-weak" - [self expectationForPredicate:[NSPredicate predicateWithFormat:@"self != nil"] - evaluatedWithObject:weakPlayer - handler:nil]; -#pragma clang diagnostic pop - [self waitForExpectationsWithTimeout:10.0 handler:nil]; - - [avPlayer willChangeValueForKey:@"rate"]; // No assertions needed. Lack of crash is a success. -} - -// During the hot reload: -// 1. `[FVPVideoPlayer onTextureUnregistered:]` gets called. -// 2. `[FVPVideoPlayerPlugin initialize:]` gets called. -// -// Both of these methods dispatch [FVPVideoPlayer dispose] on the main thread -// leading to a possible crash when de-registering observers twice. -- (void)testHotReloadDoesNotCrash { - __weak FVPVideoPlayer *weakPlayer = nil; - - // Autoreleasepool is needed to simulate conditions of FVPVideoPlayer deallocation. 
- @autoreleasepool { - FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil] - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *) - videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - weakPlayer = player; - - [player onTextureUnregistered:nil]; - XCTAssertNil(error); - - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - } - - // [FVPVideoPlayerPlugin dispose:error:] selector is dispatching the [FVPVideoPlayer dispose] call - // with a 1-second delay keeping a strong reference to the player. The polling ensures the player - // was truly deallocated. -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Warc-repeated-use-of-weak" - [self expectationForPredicate:[NSPredicate predicateWithFormat:@"self != nil"] - evaluatedWithObject:weakPlayer - handler:nil]; -#pragma clang diagnostic pop - [self waitForExpectationsWithTimeout:10.0 - handler:nil]; // No assertions needed. Lack of crash is a success. -} - -- (void)testFailedToLoadVideoEventShouldBeAlwaysSent { - // Use real objects to test a real failure flow. - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:realObjectFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - FlutterError *error; - - [videoPlayerPlugin initialize:&error]; - - FVPCreationOptions *create = [FVPCreationOptions makeWithUri:@"" httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - - [self keyValueObservingExpectationForObject:(id)player.player.currentItem - keyPath:@"status" - expectedValue:@(AVPlayerItemStatusFailed)]; - [self waitForExpectationsWithTimeout:10.0 handler:nil]; - - XCTestExpectation *failedExpectation = [self expectationWithDescription:@"failed"]; - // TODO(stuartmorgan): Update this test to instead use a mock listener, and add separate unit - // tests of FVPEventBridge. 
- [(NSObject *)player.eventListener - onListenWithArguments:nil - eventSink:^(FlutterError *event) { - if ([event isKindOfClass:FlutterError.class]) { - [failedExpectation fulfill]; - } - }]; - [self waitForExpectationsWithTimeout:10.0 handler:nil]; -} - -- (void)testUpdatePlayingStateShouldNotResetRate { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:self.mp4TestURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - - XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; - StubEventListener *listener = - [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; - player.eventListener = listener; - [self waitForExpectationsWithTimeout:10 handler:nil]; - - FlutterError *error; - [player setPlaybackSpeed:2 error:&error]; - [player playWithError:&error]; - XCTAssertEqual(player.player.rate, 2); -} - -- (void)testPlayerShouldNotDropEverySecondFrame { - TestTextureRegistry *textureRegistry = [[TestTextureRegistry alloc] init]; - - StubFVPDisplayLinkFactory *stubDisplayLinkFactory = [[StubFVPDisplayLinkFactory alloc] init]; - TestPixelBufferSource *mockVideoOutput = [[TestPixelBufferSource alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = [[FVPVideoPlayerPlugin alloc] - initWithAVFactory:[[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:mockVideoOutput] - displayLinkFactory:stubDisplayLinkFactory - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:textureRegistry - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - FVPCreationOptions *create = [FVPCreationOptions - makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - NSInteger playerIdentifier = identifiers.playerId; - FVPTextureBasedVideoPlayer *player = - (FVPTextureBasedVideoPlayer *)videoPlayerPlugin.playersByIdentifier[@(playerIdentifier)]; - - void (^addFrame)(void) = ^{ - CVPixelBufferRef bufferRef; - CVPixelBufferCreate(NULL, 1, 1, kCVPixelFormatType_32BGRA, NULL, &bufferRef); - mockVideoOutput.pixelBuffer = bufferRef; - CVPixelBufferRelease(bufferRef); - }; - - addFrame(); - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); - XCTAssertEqual(textureRegistry.textureFrameAvailableCount, 1); - - addFrame(); - stubDisplayLinkFactory.fireDisplayLink(); - CFRelease([player copyPixelBuffer]); - XCTAssertEqual(textureRegistry.textureFrameAvailableCount, 2); -} - -- (void)testVideoOutputIsAddedWhenAVPlayerItemBecomesReady { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:realObjectFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - XCTAssertNil(error); - FVPCreationOptions *create = [FVPCreationOptions - 
makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" - httpHeaders:@{}]; - - FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create - error:&error]; - XCTAssertNil(error); - XCTAssertNotNil(identifiers); - FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; - XCTAssertNotNil(player); - - AVPlayerItem *item = player.player.currentItem; - [self keyValueObservingExpectationForObject:(id)item - keyPath:@"status" - expectedValue:@(AVPlayerItemStatusReadyToPlay)]; - [self waitForExpectationsWithTimeout:10.0 handler:nil]; - // Video output is added as soon as the status becomes ready to play. - XCTAssertEqual(item.outputs.count, 1); -} - -#if TARGET_OS_IOS -- (void)testVideoPlayerShouldNotOverwritePlayAndRecordNorDefaultToSpeaker { - StubFVPAVFactory *stubFactory = [[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil]; - TestAudioSession *audioSession = [[TestAudioSession alloc] init]; - stubFactory.audioSession = audioSession; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:stubFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - audioSession.category = AVAudioSessionCategoryPlayAndRecord; - audioSession.categoryOptions = AVAudioSessionCategoryOptionDefaultToSpeaker; - - FlutterError *error; - [videoPlayerPlugin initialize:&error]; - [videoPlayerPlugin setMixWithOthers:true error:&error]; - XCTAssert(audioSession.category == AVAudioSessionCategoryPlayAndRecord, - @"Category should be PlayAndRecord."); - XCTAssert(audioSession.categoryOptions & AVAudioSessionCategoryOptionDefaultToSpeaker, - @"Flag DefaultToSpeaker was removed."); - XCTAssert(audioSession.categoryOptions & AVAudioSessionCategoryOptionMixWithOthers, - @"Flag MixWithOthers should be set."); -} - -- (void)testSetMixWithOthersShouldNoOpWhenNoChangesAreRequired { - StubFVPAVFactory *stubFactory = [[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:nil - pixelBufferSource:nil]; - TestAudioSession *audioSession = [[TestAudioSession alloc] init]; - stubFactory.audioSession = audioSession; - FVPVideoPlayerPlugin *videoPlayerPlugin = - [[FVPVideoPlayerPlugin alloc] initWithAVFactory:stubFactory - displayLinkFactory:nil - binaryMessenger:[[StubBinaryMessenger alloc] init] - textureRegistry:[[TestTextureRegistry alloc] init] - viewProvider:[[StubViewProvider alloc] init] - assetProvider:[[StubAssetProvider alloc] init]]; - - audioSession.category = AVAudioSessionCategoryPlayAndRecord; - audioSession.categoryOptions = - AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionDefaultToSpeaker; - - FlutterError *error; - [videoPlayerPlugin setMixWithOthers:true error:&error]; - - XCTAssertFalse(audioSession.setCategoryCalled); -} - -- (void)validateTransformFixForOrientation:(UIImageOrientation)orientation { - AVAssetTrack *track = [[FakeAVAssetTrack alloc] initWithOrientation:orientation]; - CGAffineTransform t = FVPGetStandardizedTransformForTrack(track); - CGSize size = track.naturalSize; - CGFloat expectX, expectY; - switch (orientation) { - case UIImageOrientationUp: - expectX = 0; - expectY = 0; - break; - case UIImageOrientationDown: - expectX = size.width; - expectY = size.height; - break; - case UIImageOrientationLeft: - expectX = 0; - expectY = 
size.width; - break; - case UIImageOrientationRight: - expectX = size.height; - expectY = 0; - break; - case UIImageOrientationUpMirrored: - expectX = size.width; - expectY = 0; - break; - case UIImageOrientationDownMirrored: - expectX = 0; - expectY = size.height; - break; - case UIImageOrientationLeftMirrored: - expectX = size.height; - expectY = size.width; - break; - case UIImageOrientationRightMirrored: - expectX = 0; - expectY = 0; - break; - } - XCTAssertEqual(t.tx, expectX); - XCTAssertEqual(t.ty, expectY); -} -#endif - -/// Returns a test URL for creating a player from a network source. -- (nonnull NSURL *)mp4TestURL { - return (NSURL *_Nonnull)[NSURL - URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"]; -} - -- (nonnull NSObject *)playerItemWithURL:(NSURL *)url - factory:(NSObject *)factory { - return [factory playerItemWithAsset:[factory URLAssetWithURL:url options:nil]]; -} - -#pragma mark - Audio Track Tests - -// Tests getAudioTracks with a regular MP4 video file using real AVFoundation. -// Regular MP4 files do not have media selection groups, so getAudioTracks returns an empty array. -- (void)testGetAudioTracksWithRealMP4Video { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:self.mp4TestURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - XCTAssertNotNil(player); - - XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; - StubEventListener *listener = - [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; - player.eventListener = listener; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Now test getAudioTracks - FlutterError *error = nil; - NSArray *result = [player getAudioTracks:&error]; - - XCTAssertNil(error); - XCTAssertNotNil(result); - - // Regular MP4 files do not have media selection groups for audio. - // getAudioTracks only returns selectable audio tracks from HLS streams. - XCTAssertEqual(result.count, 0); - - [player disposeWithError:&error]; -} - -// Tests getAudioTracks with an HLS stream using real AVFoundation. -// HLS streams use media selection groups for audio track selection. -- (void)testGetAudioTracksWithRealHLSStream { - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - NSURL *hlsURL = [NSURL - URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8"]; - XCTAssertNotNil(hlsURL); - - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:hlsURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - XCTAssertNotNil(player); - - XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; - StubEventListener *listener = - [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; - player.eventListener = listener; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Now test getAudioTracks - FlutterError *error = nil; - NSArray *result = [player getAudioTracks:&error]; - - XCTAssertNil(error); - XCTAssertNotNil(result); - - // For HLS streams with multiple audio options, we get media selection tracks. - // The bee.m3u8 stream may or may not have multiple audio tracks. - // We verify the method returns valid data without crashing. 
- for (FVPMediaSelectionAudioTrackData *track in result) { - XCTAssertNotNil(track.displayName); - XCTAssertGreaterThanOrEqual(track.index, 0); - } - - [player disposeWithError:&error]; -} - -// Tests that getAudioTracks returns valid data for audio-only files. -// Regular audio files do not have media selection groups, so getAudioTracks returns an empty array. -- (void)testGetAudioTracksWithRealAudioFile { - // TODO(stuartmorgan): Add more use of protocols in FVPVideoPlayer so that this test - // can use a fake item/asset instead of loading an actual remote asset. - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - NSURL *audioURL = [NSURL - URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/audio/rooster.mp3"]; - XCTAssertNotNil(audioURL); - - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:audioURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - XCTAssertNotNil(player); - - XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; - StubEventListener *listener = - [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; - player.eventListener = listener; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Now test getAudioTracks - FlutterError *error = nil; - NSArray *result = [player getAudioTracks:&error]; - - XCTAssertNil(error); - XCTAssertNotNil(result); - - // Regular audio files do not have media selection groups. - // getAudioTracks only returns selectable audio tracks from HLS streams. - XCTAssertEqual(result.count, 0); - - [player disposeWithError:&error]; -} - -// Tests that getAudioTracks works correctly through the plugin API with a real video. -// Regular MP4 files do not have media selection groups, so getAudioTracks returns an empty array. -- (void)testGetAudioTracksViaPluginWithRealVideo { - // TODO(stuartmorgan): Add more use of protocols in FVPVideoPlayer so that this test - // can use a fake item/asset instead of loading an actual remote asset. - NSObject *realObjectFactory = [[FVPDefaultAVFactory alloc] init]; - NSURL *testURL = - [NSURL URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4"]; - XCTAssertNotNil(testURL); - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] - initWithPlayerItem:[self playerItemWithURL:testURL factory:realObjectFactory] - avFactory:realObjectFactory - viewProvider:[[StubViewProvider alloc] init]]; - - // Wait for player item to become ready - AVPlayerItem *item = player.player.currentItem; - [self keyValueObservingExpectationForObject:(id)item - keyPath:@"status" - expectedValue:@(AVPlayerItemStatusReadyToPlay)]; - [self waitForExpectationsWithTimeout:30.0 handler:nil]; - - // Now test getAudioTracks - FlutterError *error; - NSArray *result = [player getAudioTracks:&error]; - - XCTAssertNil(error); - XCTAssertNotNil(result); - - // Regular MP4 files do not have media selection groups. - // getAudioTracks only returns selectable audio tracks from HLS streams. 
- XCTAssertEqual(result.count, 0); - - [player disposeWithError:&error]; -} - -- (void)testLoadTracksWithMediaTypeIsCalledOnNewerOS { - if (@available(iOS 15.0, macOS 12.0, *)) { - TestAsset *mockAsset = [[TestAsset alloc] initWithDuration:CMTimeMake(1, 1) tracks:@[]]; - NSObject *item = [[StubPlayerItem alloc] initWithAsset:mockAsset]; - - StubFVPAVFactory *stubAVFactory = [[StubFVPAVFactory alloc] initWithPlayer:nil - playerItem:item - pixelBufferSource:nil]; - StubViewProvider *stubViewProvider = -#if TARGET_OS_OSX - [[StubViewProvider alloc] initWithView:nil]; -#else - [[StubViewProvider alloc] initWithViewController:nil]; -#endif - FVPVideoPlayer *player = [[FVPVideoPlayer alloc] initWithPlayerItem:item - avFactory:stubAVFactory - viewProvider:stubViewProvider]; - XCTAssertNotNil(player); - XCTAssertTrue(mockAsset.loadedTracksAsynchronously); - } -} - -@end diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.swift b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.swift new file mode 100644 index 000000000000..14d46abb2c5a --- /dev/null +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.swift @@ -0,0 +1,844 @@ +// Copyright 2013 The Flutter Authors +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +import AVFoundation +import Testing +@preconcurrency import video_player_avfoundation + +#if os(iOS) + import Flutter +#else + import FlutterMacOS +#endif + +private let mp4TestURI = + "https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" +private let hlsTestURI = + "https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8" +private let mp3AudioTestURI = + "https://flutter.github.io/assets-for-api-docs/assets/audio/rooster.mp3" +private let hlsAudioTestURI = + "https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee_audio_only.m3u8" + +@MainActor struct VideoPlayerTests { + + @Test func blankVideoBugWithEncryptedVideoStreamAndInvertedAspectRatioBugForSomeVideoStream() + throws + { + // This is to fix 2 bugs: 1. blank video for encrypted video streams on iOS 16 + // (https://github.com/flutter/flutter/issues/111457) and 2. swapped width and height for some + // video streams (not just iOS 16). (https://github.com/flutter/flutter/issues/109116). An + // invisible AVPlayerLayer is used to overwrite the protection of pixel buffers in those streams + // for issue #1, and restore the correct width and height for issue #2. + #if os(iOS) + let view = UIView(frame: CGRect(x: 0, y: 0, width: 10, height: 10)) + let viewController = UIViewController() + viewController.view = view + let viewProvider = StubViewProvider(viewController: viewController) + #else + let view = NSView(frame: NSRect(x: 0, y: 0, width: 10, height: 10)) + view.wantsLayer = true + let viewProvider = StubViewProvider(view: view) + #endif + let videoPlayerPlugin = createInitializedPlugin(viewProvider: viewProvider) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let player = + videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! 
FVPTextureBasedVideoPlayer + + #expect(player.playerLayer.superlayer == view.layer) + } + + @Test func playerForPlatformViewDoesNotRegisterTexture() { + let textureRegistry = TestTextureRegistry() + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + let videoPlayerPlugin = createInitializedPlugin( + displayLinkFactory: stubDisplayLinkFactory, + textureRegistry: textureRegistry) + + var error: FlutterError? + videoPlayerPlugin.createPlatformViewPlayer( + with: FVPCreationOptions.make(withUri: hlsTestURI, httpHeaders: [:]), + error: &error) + #expect(error == nil) + + #expect(!textureRegistry.registeredTexture) + } + + @Test func seekToWhilePausedStartsDisplayLinkTemporarily() async throws { + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + let mockVideoOutput = TestPixelBufferSource() + // Display link and frame updater wire-up is currently done in FVPVideoPlayerPlugin, so create + // the player via the plugin instead of directly to include that logic in the test. + let videoPlayerPlugin = createInitializedPlugin( + avFactory: StubFVPAVFactory(pixelBufferSource: mockVideoOutput), + displayLinkFactory: stubDisplayLinkFactory) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: hlsTestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let player = + videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPTextureBasedVideoPlayer + + // Ensure that the video playback is paused before seeking. + player.pauseWithError(&error) + #expect(error == nil) + + await asyncSeekTo(player: player, time: 1234) + + // Seeking to a new position should start the display link temporarily. + #expect(stubDisplayLinkFactory.displayLink.running) + + // Simulate a buffer being available. + var bufferRef: CVPixelBuffer? + CVPixelBufferCreate(nil, 1, 1, kCVPixelFormatType_32BGRA, nil, &bufferRef) + mockVideoOutput.pixelBuffer = bufferRef + // Simulate a callback from the engine to request a new frame. + stubDisplayLinkFactory.fireDisplayLink?() + player.copyPixelBuffer() + // Since a frame was found, and the video is paused, the display link should be paused again. + #expect(!stubDisplayLinkFactory.displayLink.running) + } + + @Test func initStartsDisplayLinkTemporarily() throws { + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + let mockVideoOutput = TestPixelBufferSource() + let videoPlayerPlugin = createInitializedPlugin( + avFactory: StubFVPAVFactory(pixelBufferSource: mockVideoOutput), + displayLinkFactory: stubDisplayLinkFactory) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: hlsTestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + + // Init should start the display link temporarily. + #expect(stubDisplayLinkFactory.displayLink.running) + + // Simulate a buffer being available. + var bufferRef: CVPixelBuffer? + CVPixelBufferCreate(nil, 1, 1, kCVPixelFormatType_32BGRA, nil, &bufferRef) + mockVideoOutput.pixelBuffer = bufferRef + // Simulate a callback from the engine to request a new frame. + let player = + videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPTextureBasedVideoPlayer + stubDisplayLinkFactory.fireDisplayLink?() + player.copyPixelBuffer() + // Since a frame was found, and the video is paused, the display link should be paused again. 
+ #expect(!stubDisplayLinkFactory.displayLink.running) + } + + @Test func seekToWhilePlayingDoesNotStopDisplayLink() async { + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + let mockVideoOutput = TestPixelBufferSource() + let videoPlayerPlugin = createInitializedPlugin( + avFactory: StubFVPAVFactory(pixelBufferSource: mockVideoOutput), + displayLinkFactory: stubDisplayLinkFactory) + + var error: FlutterError? + let identifiers = videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: hlsTestURI, httpHeaders: [:]), + error: &error) + #expect(error == nil) + let player = + videoPlayerPlugin.playersByIdentifier[identifiers!.playerId] as! FVPTextureBasedVideoPlayer + + // Ensure that the video is playing before seeking. + player.playWithError(&error) + #expect(error == nil) + + await asyncSeekTo(player: player, time: 1234) + + #expect(stubDisplayLinkFactory.displayLink.running) + + // Simulate a buffer being available. + var bufferRef: CVPixelBuffer? + CVPixelBufferCreate(nil, 1, 1, kCVPixelFormatType_32BGRA, nil, &bufferRef) + mockVideoOutput.pixelBuffer = bufferRef + // Simulate a callback from the engine to request a new frame. + stubDisplayLinkFactory.fireDisplayLink?() + // Since the video was playing, the display link should not be paused after getting a buffer. + #expect(stubDisplayLinkFactory.displayLink.running) + } + + @Test func pauseWhileWaitingForFrameDoesNotStopDisplayLink() { + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + // Display link and frame updater wire-up is currently done in FVPVideoPlayerPlugin, so create + // the player via the plugin instead of directly to include that logic in the test. + let videoPlayerPlugin = createInitializedPlugin(displayLinkFactory: stubDisplayLinkFactory) + + var error: FlutterError? + let identifiers = videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: hlsTestURI, httpHeaders: [:]), + error: &error) + #expect(error == nil) + let player = + videoPlayerPlugin.playersByIdentifier[identifiers!.playerId] as! FVPTextureBasedVideoPlayer + + // Run a play/pause cycle to force the pause codepath to run completely. + player.playWithError(&error) + #expect(error == nil) + player.pauseWithError(&error) + #expect(error == nil) + + // Since a buffer hasn't been available yet, the pause should not have stopped the display link. + #expect(stubDisplayLinkFactory.displayLink.running) + } + + @Test func deregistersFromPlayer() throws { + let videoPlayerPlugin = createInitializedPlugin() + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let player = videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPVideoPlayer + + player.disposeWithError(&error) + #expect(error == nil) + #expect(videoPlayerPlugin.playersByIdentifier.count == 0) + } + + @Test func bufferingStateFromPlayer() async throws { + // TODO(stuartmorgan): Rewrite this test to use stubs, instead of running for 10 + // seconds with a real player and hoping to get buffer status updates. + let realObjectFactory = FVPDefaultAVFactory() + let videoPlayerPlugin = createInitializedPlugin(avFactory: realObjectFactory) + + var error: FlutterError? 
+ let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let player = videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPVideoPlayer + let avPlayer = player.player + avPlayer.play() + + let eventSink: FlutterEventSink = { event in + guard let event = event as? [String: Any], let eventType = event["event"] as? String else { + return + } + if eventType == "bufferingEnd" { + #expect(avPlayer.currentItem!.isPlaybackLikelyToKeepUp) + } + if eventType == "bufferingStart" { + #expect(!avPlayer.currentItem!.isPlaybackLikelyToKeepUp) + } + } + (player.eventListener as? FlutterStreamHandler)?.onListen( + withArguments: nil, eventSink: eventSink) + + // Load for a while to let some buffer events happen. + try await Task.sleep(nanoseconds: 10 * 1_000_000_000) + } + + private func durationApproximatelyEquals(_ actual: Int64, _ expected: Int64, tolerance: Int64) + -> Bool + { + return abs(actual - expected) < tolerance + } + + @Test func videoControls() async throws { + let eventListener = try await sanityTestURI(mp4TestURI) + #expect(eventListener.initializationSize.height == 720) + #expect(eventListener.initializationSize.width == 1280) + #expect(durationApproximatelyEquals(eventListener.initializationDuration, 4000, tolerance: 200)) + } + + @Test func audioControls() async throws { + let eventListener = try await sanityTestURI(mp3AudioTestURI) + #expect(eventListener.initializationSize.height == 0) + #expect(eventListener.initializationSize.width == 0) + #expect(durationApproximatelyEquals(eventListener.initializationDuration, 5400, tolerance: 200)) + } + + @Test func hLSControls() async throws { + let eventListener = try await sanityTestURI(hlsTestURI) + #expect(eventListener.initializationSize.height == 720) + #expect(eventListener.initializationSize.width == 1280) + #expect(durationApproximatelyEquals(eventListener.initializationDuration, 4000, tolerance: 200)) + } + + @Test(.disabled("Flaky"), .bug("https://github.com/flutter/flutter/issues/164381")) + func audioOnlyHLSControls() async throws { + let eventListener = try await sanityTestURI(hlsAudioTestURI) + #expect(eventListener.initializationSize.height == 0) + #expect(eventListener.initializationSize.width == 0) + #expect(durationApproximatelyEquals(eventListener.initializationDuration, 4000, tolerance: 200)) + } + + #if os(iOS) + @Test func transformFixOrientationUp() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform.identity + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == 0) + #expect(t.ty == 0) + } + + @Test func transformFixOrientationDown() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: -1, b: 0, c: 0, d: -1, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == size.width) + #expect(t.ty == size.height) + } + + @Test func transformFixOrientationLeft() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: 0, b: -1, c: 1, d: 0, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == 0) + #expect(t.ty == size.width) + } + + @Test func transformFixOrientationRight() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: 0, b: 1, c: -1, d: 0, tx: 0, ty: 0) + let t = 
FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == size.height) + #expect(t.ty == 0) + } + + @Test func transformFixOrientationUpMirrored() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: -1, b: 0, c: 0, d: 1, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == size.width) + #expect(t.ty == 0) + } + + @Test func transformFixOrientationDownMirrored() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: 1, b: 0, c: 0, d: -1, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == 0) + #expect(t.ty == size.height) + } + + @Test func transformFixOrientationLeftMirrored() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: 0, b: -1, c: -1, d: 0, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == size.height) + #expect(t.ty == size.width) + } + + @Test func transformFixOrientationRightMirrored() { + let size = CGSize(width: 800, height: 600) + let naturalTransform = CGAffineTransform(a: 0, b: 1, c: 1, d: 0, tx: 0, ty: 0) + let t = FVPGetStandardizedTrackTransform(naturalTransform, size) + #expect(t.tx == 0) + #expect(t.ty == 0) + } + #endif + + @Test func seekToleranceWhenNotSeekingToEnd() async { + let inspectableAVPlayer = InspectableAVPlayer() + let stubAVFactory = StubFVPAVFactory(player: inspectableAVPlayer) + let player = FVPVideoPlayer( + playerItem: StubPlayerItem(), + avFactory: stubAVFactory, + viewProvider: StubViewProvider()) + let listener = StubEventListener() + player.eventListener = listener + + await asyncSeekTo(player: player, time: 1234) + + #expect(inspectableAVPlayer.beforeTolerance?.intValue == 0) + #expect(inspectableAVPlayer.afterTolerance?.intValue == 0) + } + + @Test func seekToleranceWhenSeekingToEnd() async { + let inspectableAVPlayer = InspectableAVPlayer() + let stubAVFactory = StubFVPAVFactory(player: inspectableAVPlayer) + let player = FVPVideoPlayer( + playerItem: StubPlayerItem(), + avFactory: stubAVFactory, + viewProvider: StubViewProvider()) + let listener = StubEventListener() + player.eventListener = listener + + await asyncSeekTo(player: player, time: 0) + + #expect((inspectableAVPlayer.beforeTolerance?.intValue ?? 0) > 0) + #expect((inspectableAVPlayer.afterTolerance?.intValue ?? 0) > 0) + } + + /// Sanity checks a video player playing the given URL with the actual AVPlayer. This is essentially + /// a mini integration test of the player component. + /// + /// Returns the stub event listener to allow tests to inspect the call state. + func sanityTestURI(_ testURI: String) async throws -> StubEventListener { + let realObjectFactory = FVPDefaultAVFactory() + let testURL = try #require(URL(string: testURI)) + let player = FVPVideoPlayer( + playerItem: playerItem(with: testURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + let listener = StubEventListener() + await withCheckedContinuation { continuation in + listener.onInitialized = { continuation.resume() } + player.eventListener = listener + } + + // Starts paused. + let avPlayer = player.player + #expect(avPlayer.rate == 0) + #expect(avPlayer.volume == 1) + #expect(avPlayer.timeControlStatus == .paused) + + // Change playback speed. + var error: FlutterError? 
+ player.setPlaybackSpeed(2, error: &error) + #expect(error == nil) + player.playWithError(&error) + #expect(error == nil) + #expect(avPlayer.rate == 2) + #expect(avPlayer.timeControlStatus == .waitingToPlayAtSpecifiedRate) + + // Volume + player.setVolume(0.1, error: &error) + #expect(error == nil) + #expect(avPlayer.volume == 0.1) + + return listener + } + + // Checks whether [AVPlayer rate] KVO observations are correctly detached. + // - https://github.com/flutter/flutter/issues/124937 + // + // Failing to de-register results in a crash in [AVPlayer willChangeValueForKey:]. + @Test func doesNotCrashOnRateObservationAfterDisposal() async throws { + let realObjectFactory = FVPDefaultAVFactory() + + var avPlayer: AVPlayer? = nil + weak var weakPlayer: FVPVideoPlayer? = nil + + // Autoreleasepool is needed to simulate conditions of FVPVideoPlayer deallocation. + try autoreleasepool { + let videoPlayerPlugin = createInitializedPlugin(avFactory: realObjectFactory) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + + let player = videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPVideoPlayer + weakPlayer = player + avPlayer = player.player + + player.disposeWithError(&error) + #expect(error == nil) + } + + // Wait for the weak pointer to be invalidated, indicating that the player has been deallocated. + let checkInterval = 0.1 + let maxTries = Int64(30 / checkInterval) + for _ in 1...maxTries { + if weakPlayer == nil { + break + } + try await Task.sleep(nanoseconds: UInt64(checkInterval * 1_000_000_000)) + } + + await MainActor.run { + avPlayer?.willChangeValue(forKey: "rate") + avPlayer?.didChangeValue(forKey: "rate") + } + // No assertions needed. Lack of crash is a success. + } + + // During the hot reload: + // 1. `[FVPVideoPlayer onTextureUnregistered:]` gets called. + // 2. `[FVPVideoPlayerPlugin initialize:]` gets called. + // + // Both of these methods dispatch [FVPVideoPlayer dispose] on the main thread + // leading to a possible crash when de-registering observers twice. + @Test func hotReloadDoesNotCrash() async throws { + weak var weakPlayer: FVPVideoPlayer? = nil + + // Autoreleasepool is needed to simulate conditions of FVPVideoPlayer deallocation. + try autoreleasepool { + let videoPlayerPlugin = createInitializedPlugin(avFactory: StubFVPAVFactory()) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + + let player = + videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! FVPTextureBasedVideoPlayer + weakPlayer = player + + player.onTextureUnregistered(StubTexture()) + + videoPlayerPlugin.initialize(&error) + #expect(error == nil) + } + + // Wait for the weak pointer to be invalidated, indicating that the player has been deallocated. + let checkInterval = 0.1 + let maxTries = Int64(30 / checkInterval) + for _ in 1...maxTries { + if weakPlayer == nil { + break + } + try await Task.sleep(nanoseconds: UInt64(checkInterval * 1_000_000_000)) + } + // No assertions needed. Lack of crash is a success. + } + + @Test func failedToLoadVideoEventShouldBeAlwaysSent() async { + // Use real objects to test a real failure flow. 
+ let realObjectFactory = FVPDefaultAVFactory() + let videoPlayerPlugin = createInitializedPlugin(avFactory: realObjectFactory) + + var error: FlutterError? + let identifiers = videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: "", httpHeaders: [:]), error: &error) + #expect(error == nil) + let player = videoPlayerPlugin.playersByIdentifier[identifiers!.playerId] as! FVPVideoPlayer + + await withCheckedContinuation { continuation in + // TODO(stuartmorgan): Update this test to instead use a mock listener, and add separate unit + // tests of FVPEventBridge. + let eventSink: FlutterEventSink = { event in + if event is FlutterError { + continuation.resume() + } + } + (player.eventListener as? FlutterStreamHandler)?.onListen( + withArguments: nil, eventSink: eventSink) + } + } + + @Test func updatePlayingStateShouldNotResetRate() async throws { + let realObjectFactory = FVPDefaultAVFactory() + let testURL = try #require(URL(string: mp4TestURI)) + let player = FVPVideoPlayer( + playerItem: playerItem(with: testURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + await withCheckedContinuation { continuation in + let listener = StubEventListener(onInitialized: { continuation.resume() }) + player.eventListener = listener + } + + var error: FlutterError? + player.setPlaybackSpeed(2, error: &error) + #expect(error == nil) + player.playWithError(&error) + #expect(error == nil) + #expect(player.player.rate == 2) + } + + @Test func playerShouldNotDropEverySecondFrame() throws { + let textureRegistry = TestTextureRegistry() + let stubDisplayLinkFactory = StubFVPDisplayLinkFactory() + let mockVideoOutput = TestPixelBufferSource() + let videoPlayerPlugin = createInitializedPlugin( + avFactory: StubFVPAVFactory(pixelBufferSource: mockVideoOutput), + displayLinkFactory: stubDisplayLinkFactory, + textureRegistry: textureRegistry) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let playerIdentifier = identifiers.playerId + let player = + videoPlayerPlugin.playersByIdentifier[playerIdentifier] as! FVPTextureBasedVideoPlayer + + func addFrame() { + var bufferRef: CVPixelBuffer? + CVPixelBufferCreate(nil, 1, 1, kCVPixelFormatType_32BGRA, nil, &bufferRef) + mockVideoOutput.pixelBuffer = bufferRef + } + + addFrame() + stubDisplayLinkFactory.fireDisplayLink?() + player.copyPixelBuffer() + #expect(textureRegistry.textureFrameAvailableCount == 1) + + addFrame() + stubDisplayLinkFactory.fireDisplayLink?() + player.copyPixelBuffer() + #expect(textureRegistry.textureFrameAvailableCount == 2) + } + + @Test func videoOutputIsAddedWhenAVPlayerIsInitialized() async throws { + let realObjectFactory = FVPDefaultAVFactory() + let videoPlayerPlugin = createInitializedPlugin(avFactory: realObjectFactory) + + var error: FlutterError? + let identifiers = try #require( + videoPlayerPlugin.createTexturePlayer( + with: FVPCreationOptions.make(withUri: mp4TestURI, httpHeaders: [:]), + error: &error)) + #expect(error == nil) + let player = videoPlayerPlugin.playersByIdentifier[identifiers.playerId] as! 
FVPVideoPlayer + + let listener = StubEventListener() + await withCheckedContinuation { continuation in + listener.onInitialized = { continuation.resume() } + player.eventListener = listener + } + + let item = try #require(player.player.currentItem) + // Video output is added as soon as the status becomes ready to play. + #expect(item.outputs.count == 1) + } + + #if os(iOS) + @Test func videoPlayerShouldNotOverwritePlayAndRecordNorDefaultToSpeaker() { + let stubFactory = StubFVPAVFactory() + let audioSession = TestAudioSession() + stubFactory.audioSession = audioSession + audioSession.category = .playAndRecord + audioSession.categoryOptions = .defaultToSpeaker + let videoPlayerPlugin = createInitializedPlugin(avFactory: stubFactory) + + var error: FlutterError? + videoPlayerPlugin.setMixWithOthers(true, error: &error) + #expect(error == nil) + #expect(audioSession.category == .playAndRecord) + #expect(audioSession.categoryOptions.contains(.defaultToSpeaker)) + #expect(audioSession.categoryOptions.contains(.mixWithOthers)) + } + + @Test func setMixWithOthersShouldNoOpWhenNoChangesAreRequired() { + let stubFactory = StubFVPAVFactory() + let audioSession = TestAudioSession() + stubFactory.audioSession = audioSession + audioSession.category = .playAndRecord + audioSession.categoryOptions = [.mixWithOthers, .defaultToSpeaker] + let videoPlayerPlugin = createInitializedPlugin(avFactory: stubFactory) + + var error: FlutterError? + videoPlayerPlugin.setMixWithOthers(true, error: &error) + #expect(error == nil) + #expect(!audioSession.setCategoryCalled) + } + #endif + + // MARK: - Audio Track Tests + + // Tests getAudioTracks with a regular MP4 video file using real AVFoundation. + // Regular MP4 files do not have media selection groups, so getAudioTracks returns an empty array. + @Test func getAudioTracksWithRealMP4Video() async throws { + let realObjectFactory = FVPDefaultAVFactory() + let testURL = try #require(URL(string: mp4TestURI)) + let player = FVPVideoPlayer( + playerItem: playerItem(with: testURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + await withCheckedContinuation { continuation in + let listener = StubEventListener(onInitialized: { continuation.resume() }) + player.eventListener = listener + } + + // Now test getAudioTracks + var error: FlutterError? + let result = try #require(player.getAudioTracks(&error)) + #expect(error == nil) + + // Regular MP4 files do not have media selection groups for audio. + // getAudioTracks only returns selectable audio tracks from HLS streams. + #expect(result.count == 0) + + player.disposeWithError(&error) + } + + // Tests getAudioTracks with an HLS stream using real AVFoundation. + // HLS streams use media selection groups for audio track selection. + @Test func getAudioTracksWithRealHLSStream() async throws { + let realObjectFactory = FVPDefaultAVFactory() + let hlsURL = try #require(URL(string: hlsTestURI)) + + let player = FVPVideoPlayer( + playerItem: playerItem(with: hlsURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + await withCheckedContinuation { continuation in + let listener = StubEventListener(onInitialized: { continuation.resume() }) + player.eventListener = listener + } + + // Now test getAudioTracks + var error: FlutterError? + let result = try #require(player.getAudioTracks(&error)) + #expect(error == nil) + + // For HLS streams with multiple audio options, we get media selection tracks. 
+ // The bee.m3u8 stream may or may not have multiple audio tracks. + // We verify the method returns valid data without crashing. + for track in result { + #expect(track.displayName != nil) + #expect(track.index >= 0) + } + + player.disposeWithError(&error) + } + + // Tests that getAudioTracks returns valid data for audio-only files. + // Regular audio files do not have media selection groups, so getAudioTracks returns an empty array. + @Test func getAudioTracksWithRealAudioFile() async throws { + // TODO(stuartmorgan): Add more use of protocols in FVPVideoPlayer so that this test + // can use a fake item/asset instead of loading an actual remote asset. + let realObjectFactory = FVPDefaultAVFactory() + let audioURL = try #require(URL(string: mp3AudioTestURI)) + + let player = FVPVideoPlayer( + playerItem: playerItem(with: audioURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + await withCheckedContinuation { continuation in + let listener = StubEventListener(onInitialized: { continuation.resume() }) + player.eventListener = listener + } + + // Now test getAudioTracks + var error: FlutterError? + let result = try #require(player.getAudioTracks(&error)) + #expect(error == nil) + + // Regular audio files do not have media selection groups. + // getAudioTracks only returns selectable audio tracks from HLS streams. + #expect(result.count == 0) + + player.disposeWithError(&error) + } + + // Tests that getAudioTracks works correctly through the plugin API with a real video. + // Regular MP4 files do not have media selection groups, so getAudioTracks returns an empty array. + @Test func getAudioTracksViaPluginWithRealVideo() async throws { + // TODO(stuartmorgan): Add more use of protocols in FVPVideoPlayer so that this test + // can use a fake item/asset instead of loading an actual remote asset. + let realObjectFactory = FVPDefaultAVFactory() + let testURL = try #require(URL(string: mp4TestURI)) + let player = FVPVideoPlayer( + playerItem: playerItem(with: testURL, factory: realObjectFactory), + avFactory: realObjectFactory, + viewProvider: StubViewProvider()) + + // Wait for player to become ready + let listener = StubEventListener() + await withCheckedContinuation { continuation in + listener.onInitialized = { continuation.resume() } + player.eventListener = listener + } + + // Now test getAudioTracks + var error: FlutterError? + let result = try #require(player.getAudioTracks(&error)) + #expect(error == nil) + + // Regular MP4 files do not have media selection groups. + // getAudioTracks only returns selectable audio tracks from HLS streams. + #expect(result.count == 0) + + player.disposeWithError(&error) + } + + @Test func loadTracksWithMediaTypeIsCalledOnNewerOS() { + if #available(iOS 15.0, macOS 12.0, *) { + let mockAsset = TestAsset(duration: CMTimeMake(value: 1, timescale: 1), tracks: []) + let item = StubPlayerItem(asset: mockAsset) + + let stubAVFactory = StubFVPAVFactory(player: nil, playerItem: item, pixelBufferSource: nil) + let stubViewProvider = StubViewProvider() + let _ = FVPVideoPlayer( + playerItem: item, avFactory: stubAVFactory, viewProvider: stubViewProvider) + #expect(mockAsset.loadedTracksAsynchronously) + } + } + + // MARK: - Helper Methods + + /// Creates a plugin with the given dependencies, and default stubs for any that aren't provided, + /// then initializes it. 
+ private func createInitializedPlugin( + avFactory: FVPAVFactory = StubFVPAVFactory(), + displayLinkFactory: FVPDisplayLinkFactory = StubFVPDisplayLinkFactory(), + binaryMessenger: FlutterBinaryMessenger = StubBinaryMessenger(), + textureRegistry: FlutterTextureRegistry = TestTextureRegistry(), + viewProvider: FVPViewProvider = StubViewProvider(), + assetProvider: FVPAssetProvider = StubAssetProvider() + ) -> FVPVideoPlayerPlugin { + let plugin = FVPVideoPlayerPlugin( + avFactory: avFactory, + displayLinkFactory: displayLinkFactory, + binaryMessenger: binaryMessenger, + textureRegistry: textureRegistry, + viewProvider: viewProvider, + assetProvider: assetProvider) + var error: FlutterError? + plugin.initialize(&error) + #expect(error == nil) + return plugin + } + + private func playerItem(with url: URL, factory: FVPAVFactory) -> FVPAVPlayerItem { + let asset = factory.urlAsset(with: url, options: nil) + return factory.playerItem(with: asset) + } + + private func waitForPlayerItemStatus(_ item: AVPlayerItem, state: AVPlayerItem.Status) async { + await withCheckedContinuation { continuation in + // Check whether it already has the desired status. + if item.status == state { + continuation.resume() + return + } + // If not, wait for that status. + var observation: NSKeyValueObservation? + observation = item.observe(\.status, options: [.initial, .new]) { + [observation = observation] _, change in + if change.newValue == state { + observation?.invalidate() + continuation.resume() + } + } + } + } + + // TODO(stuartmorgan): Remove this in favor of just `await player.seek(...)` once + // Pigeon is generating Swift 6-friendly output. Currently using the automatic async + // conversion generates warnings due to the lack of concurrency annotations + // ("non-sendable type 'FlutterError?' returned by implicitly asynchronous call to + // nonisolated function cannot cross actor boundary"). + private func asyncSeekTo(player: FVPVideoPlayer, time: Int) async { + await withCheckedContinuation { continuation in + player.seek(to: time) { error in + #expect(error == nil) + continuation.resume() + } + } + } +} diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/AVAssetTrackUtils.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/AVAssetTrackUtils.m index 11c1ff285376..d104ea422b56 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/AVAssetTrackUtils.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/AVAssetTrackUtils.m @@ -4,9 +4,7 @@ @import AVFoundation; -CGAffineTransform FVPGetStandardizedTransformForTrack(AVAssetTrack *track) { - CGAffineTransform t = track.preferredTransform; - CGSize size = track.naturalSize; +CGAffineTransform FVPGetStandardizedTrackTransform(CGAffineTransform t, CGSize size) { // Each case of control flows corresponds to a specific // `UIImageOrientation`, with 8 cases in total. 
if (t.a == 1 && t.b == 0 && t.c == 0 && t.d == 1) { diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m index f57489cfc2b5..2270120378d5 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m @@ -90,7 +90,8 @@ - (instancetype)initWithPlayerItem:(NSObject *)item if ([videoTrack statusOfValueForKey:@"preferredTransform" error:nil] == AVKeyValueStatusLoaded) { // Rotate the video by using a videoComposition and the preferredTransform - self->_preferredTransform = FVPGetStandardizedTransformForTrack(videoTrack); + self->_preferredTransform = FVPGetStandardizedTrackTransform( + videoTrack.preferredTransform, videoTrack.naturalSize); // Do not use video composition when it is not needed. if (CGAffineTransformIsIdentity(self->_preferredTransform)) { return; diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/AVAssetTrackUtils.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/AVAssetTrackUtils.h index 19086b10e430..703a24abce2a 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/AVAssetTrackUtils.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/AVAssetTrackUtils.h @@ -4,9 +4,9 @@ @import AVFoundation; -/// Returns a standardized transform -/// according to the orientation of the track. +/// Returns a standardized transform according to the orientation of a track with the given +/// information. /// /// Note: https://stackoverflow.com/questions/64161544 /// `AVAssetTrack.preferredTransform` can have wrong `tx` and `ty`. -CGAffineTransform FVPGetStandardizedTransformForTrack(AVAssetTrack *track); +CGAffineTransform FVPGetStandardizedTrackTransform(CGAffineTransform t, CGSize size); diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPTextureBasedVideoPlayer_Test.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPTextureBasedVideoPlayer_Test.h index 2a83455cef65..cb51d28b542e 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPTextureBasedVideoPlayer_Test.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPTextureBasedVideoPlayer_Test.h @@ -23,7 +23,7 @@ NS_ASSUME_NONNULL_BEGIN /// Called when the texture is unregistered. /// This method is used to clean up resources associated with the texture. 
-- (void)onTextureUnregistered:(nullable NSObject *)texture; +- (void)onTextureUnregistered:(NSObject *)texture; @end NS_ASSUME_NONNULL_END diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin.h index 9f15d53b0821..447d38731380 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin.h @@ -8,6 +8,10 @@ @import Flutter; #endif +NS_ASSUME_NONNULL_BEGIN + @interface FVPVideoPlayerPlugin : NSObject - (instancetype)initWithRegistrar:(NSObject *)registrar; @end + +NS_ASSUME_NONNULL_END diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin_Test.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin_Test.h index 4da66be36023..b9ba034edc8d 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin_Test.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/FVPVideoPlayerPlugin_Test.h @@ -17,6 +17,8 @@ #import "FVPViewProvider.h" #import "messages.g.h" +NS_ASSUME_NONNULL_BEGIN + // Protocol for an AVPlayer instance factory. Used for injecting display links in tests. @protocol FVPDisplayLinkFactory - (NSObject *)displayLinkWithViewProvider:(NSObject *)viewProvider @@ -38,3 +40,5 @@ assetProvider:(NSObject *)assetProvider; @end + +NS_ASSUME_NONNULL_END diff --git a/packages/video_player/video_player_avfoundation/example/ios/Flutter/Debug.xcconfig b/packages/video_player/video_player_avfoundation/example/ios/Flutter/Debug.xcconfig index e8efba114687..ec97fc6f3021 100644 --- a/packages/video_player/video_player_avfoundation/example/ios/Flutter/Debug.xcconfig +++ b/packages/video_player/video_player_avfoundation/example/ios/Flutter/Debug.xcconfig @@ -1,2 +1,2 @@ -#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include? "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" #include "Generated.xcconfig" diff --git a/packages/video_player/video_player_avfoundation/example/ios/Flutter/Release.xcconfig b/packages/video_player/video_player_avfoundation/example/ios/Flutter/Release.xcconfig index 399e9340e6f6..c4855bfe2000 100644 --- a/packages/video_player/video_player_avfoundation/example/ios/Flutter/Release.xcconfig +++ b/packages/video_player/video_player_avfoundation/example/ios/Flutter/Release.xcconfig @@ -1,2 +1,2 @@ -#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include? 
"Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" #include "Generated.xcconfig" diff --git a/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index f4ad957fd95d..54ab55a992ce 100644 --- a/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -8,6 +8,9 @@ /* Begin PBXBuildFile section */ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; + 331585C52F36433100FACB51 /* VideoPlayerUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 331585C42F36433100FACB51 /* VideoPlayerUITests.swift */; }; + 335542AD2F364D080081D0DC /* VideoPlayerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 335542AC2F364D080081D0DC /* VideoPlayerTests.swift */; }; + 335542B02F366B4F0081D0DC /* TestClasses.swift in Sources */ = {isa = PBXBuildFile; fileRef = 335542AF2F366B4F0081D0DC /* TestClasses.swift */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; @@ -17,8 +20,6 @@ 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; B0F5C77B94E32FB72444AE9F /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 20721C28387E1F78689EC502 /* libPods-Runner.a */; }; D182ECB59C06DBC7E2D5D913 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 7BD232FD3BD3343A5F52AF50 /* libPods-RunnerTests.a */; }; - F7151F2F26603EBD0028CB91 /* VideoPlayerUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = F7151F2E26603EBD0028CB91 /* VideoPlayerUITests.m */; }; - F7151F3D26603ECA0028CB91 /* VideoPlayerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = F7151F3C26603ECA0028CB91 /* VideoPlayerTests.m */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -56,6 +57,9 @@ 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; 20721C28387E1F78689EC502 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 2A2EA522BDC492279A91AB75 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Pods/Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; + 331585C42F36433100FACB51 /* VideoPlayerUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoPlayerUITests.swift; sourceTree = ""; }; + 335542AC2F364D080081D0DC /* VideoPlayerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; name = VideoPlayerTests.swift; path = ../../darwin/RunnerTests/VideoPlayerTests.swift; sourceTree = SOURCE_ROOT; }; + 335542AF2F366B4F0081D0DC /* TestClasses.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = TestClasses.swift; path = ../../darwin/RunnerTests/TestClasses.swift; sourceTree = SOURCE_ROOT; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 6CDC4DA5940705A6E7671616 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; @@ -73,10 +77,8 @@ B15EC39F4617FE1082B18834 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; C18C242FF01156F58C0DAF1C /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; F7151F2C26603EBD0028CB91 /* RunnerUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - F7151F2E26603EBD0028CB91 /* VideoPlayerUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VideoPlayerUITests.m; sourceTree = ""; }; F7151F3026603EBD0028CB91 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; F7151F3A26603ECA0028CB91 /* RunnerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = RunnerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - F7151F3C26603ECA0028CB91 /* VideoPlayerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = VideoPlayerTests.m; path = ../../../darwin/RunnerTests/VideoPlayerTests.m; sourceTree = ""; }; F7151F3E26603ECA0028CB91 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; /* End PBXFileReference section */ @@ -189,7 +191,7 @@ F7151F2D26603EBD0028CB91 /* RunnerUITests */ = { isa = PBXGroup; children = ( - F7151F2E26603EBD0028CB91 /* VideoPlayerUITests.m */, + 331585C42F36433100FACB51 /* VideoPlayerUITests.swift */, F7151F3026603EBD0028CB91 /* Info.plist */, ); path = RunnerUITests; @@ -198,7 +200,8 @@ F7151F3B26603ECA0028CB91 /* RunnerTests */ = { isa = PBXGroup; children = ( - F7151F3C26603ECA0028CB91 /* VideoPlayerTests.m */, + 335542AC2F364D080081D0DC /* VideoPlayerTests.swift */, + 335542AF2F366B4F0081D0DC /* TestClasses.swift */, F7151F3E26603ECA0028CB91 /* Info.plist */, ); path = RunnerTests; @@ -265,8 +268,6 @@ F7151F4026603ECA0028CB91 /* PBXTargetDependency */, ); name = RunnerTests; - packageProductDependencies = ( - ); productName = RunnerTests; productReference = F7151F3A26603ECA0028CB91 /* 
RunnerTests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; @@ -285,11 +286,13 @@ }; F7151F2B26603EBD0028CB91 = { CreatedOnToolsVersion = 12.5; + LastSwiftMigration = 2620; ProvisioningStyle = Automatic; TestTargetID = 97C146ED1CF9000F007C117D; }; F7151F3926603ECA0028CB91 = { CreatedOnToolsVersion = 12.5; + LastSwiftMigration = 2620; ProvisioningStyle = Automatic; TestTargetID = 97C146ED1CF9000F007C117D; }; @@ -354,12 +357,10 @@ ); inputPaths = ( "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh", - "${PODS_CONFIGURATION_BUILD_DIR}/path_provider_foundation/path_provider_foundation_privacy.bundle", "${PODS_CONFIGURATION_BUILD_DIR}/video_player_avfoundation/video_player_avfoundation_privacy.bundle", ); name = "[CP] Copy Pods Resources"; outputPaths = ( - "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/path_provider_foundation_privacy.bundle", "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/video_player_avfoundation_privacy.bundle", ); runOnlyForDeploymentPostprocessing = 0; @@ -455,7 +456,7 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - F7151F2F26603EBD0028CB91 /* VideoPlayerUITests.m in Sources */, + 331585C52F36433100FACB51 /* VideoPlayerUITests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -463,7 +464,8 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - F7151F3D26603ECA0028CB91 /* VideoPlayerTests.m in Sources */, + 335542AD2F364D080081D0DC /* VideoPlayerTests.swift in Sources */, + 335542B02F366B4F0081D0DC /* TestClasses.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -659,6 +661,7 @@ F7151F3326603EBD0028CB91 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { + CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; INFOPLIST_FILE = RunnerUITests/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( @@ -669,6 +672,8 @@ MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.RunnerUITests; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 6.0; TEST_TARGET_NAME = Runner; }; name = Debug; @@ -676,6 +681,7 @@ F7151F3426603EBD0028CB91 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { + CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; INFOPLIST_FILE = RunnerUITests/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( @@ -686,6 +692,7 @@ MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.RunnerUITests; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 6.0; TEST_TARGET_NAME = Runner; }; name = Release; @@ -695,6 +702,7 @@ baseConfigurationReference = 6CDC4DA5940705A6E7671616 /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; + CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; INFOPLIST_FILE = RunnerTests/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( @@ -705,6 +713,8 @@ MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.RunnerTests; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 6.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/Runner"; }; name = Debug; @@ -714,6 +724,7 @@ baseConfigurationReference = 2A2EA522BDC492279A91AB75 /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; + CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; INFOPLIST_FILE = RunnerTests/Info.plist; LD_RUNPATH_SEARCH_PATHS = ( @@ -724,6 +735,7 @@ MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = dev.flutter.plugins.RunnerTests; PRODUCT_NAME = 
"$(TARGET_NAME)"; + SWIFT_VERSION = 6.0; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Runner.app/Runner"; }; name = Release; diff --git a/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index d7730d34dabc..6ef3fa75e7f3 100644 --- a/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/packages/video_player/video_player_avfoundation/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -44,6 +44,7 @@ buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" + customLLDBInitFile = "$(SRCROOT)/Flutter/ephemeral/flutter_lldbinit" shouldUseLaunchSchemeArgsEnv = "YES">