VideoPlayer.mm 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484
  1. #include "VideoPlayer.h"
  2. #include "CVTextureCache.h"
  3. #include "CMVideoSampling.h"
  4. #import <AVFoundation/AVFoundation.h>
// Distinct, arbitrary non-null pointer values used as KVO contexts so that
// -observeValueForKeyPath:... can tell our own two registrations apart (and
// apart from any observation registered by a superclass, which gets forwarded
// to super). Only pointer identity matters, never the pointed-to value.
static void* _ObserveItemStatusContext = (void*)0x1;   // AVPlayerItem "status" observation
static void* _ObservePlayerItemContext = (void*)0x2;   // AVPlayer "currentItem" observation
  7. @implementation VideoPlayerView
  8. + (Class)layerClass
  9. {
  10. return [AVPlayerLayer class];
  11. }
  12. - (AVPlayer*)player
  13. {
  14. return [(AVPlayerLayer*)[self layer] player];
  15. }
  16. - (void)setPlayer:(AVPlayer*)player
  17. {
  18. [(AVPlayerLayer*)[self layer] setPlayer: player];
  19. }
  20. - (void)dealloc
  21. {
  22. self.player = nil;
  23. }
  24. @end
  25. @implementation VideoPlayer
  26. {
  27. AVPlayerItem* _playerItem;
  28. AVPlayer* _player;
  29. AVAssetReader* _reader;
  30. AVAssetReaderTrackOutput* _videoOut;
  31. CMSampleBufferRef _cmSampleBuffer;
  32. CMVideoSampling _videoSampling;
  33. CMTime _duration;
  34. CMTime _curTime;
  35. CMTime _curFrameTimestamp;
  36. CMTime _lastFrameTimestamp;
  37. CGSize _videoSize;
  38. BOOL _playerReady;
  39. // we need to have both because the order of asset/item getting ready is not strict
  40. BOOL _assetReady;
  41. BOOL _itemReady;
  42. }
  43. @synthesize delegate;
  44. @synthesize player = _player;
  45. - (BOOL)readyToPlay { return _playerReady; }
  46. - (CGSize)videoSize { return _videoSize; }
  47. - (CMTime)duration { return _duration; }
  48. - (float)durationSeconds { return CMTIME_IS_VALID(_duration) ? (float)CMTimeGetSeconds(_duration) : 0.0f; }
  49. + (BOOL)CanPlayToTexture:(NSURL*)url { return [url isFileURL]; }
  50. + (BOOL)CheckScalingModeAspectFill:(CGSize)videoSize screenSize:(CGSize)screenSize
  51. {
  52. BOOL ret = NO;
  53. CGFloat screenAspect = (screenSize.width / screenSize.height);
  54. CGFloat videoAspect = (videoSize.width / videoSize.height);
  55. CGFloat width = ceilf(videoSize.width * videoAspect / screenAspect);
  56. CGFloat height = ceilf(videoSize.height * videoAspect / screenAspect);
  57. // Do additional input video and device resolution aspect ratio
  58. // rounding check to see if the width and height values are still
  59. // the ~same.
  60. //
  61. // If they still match, we can change the video scaling mode from
  62. // aspectFit to aspectFill, this works around some off-by-one scaling
  63. // errors with certain screen size and video resolution combos
  64. //
  65. // TODO: Shouldn't harm to extend width/height check to
  66. // match values within -1..+1 range from the original
  67. if (videoSize.width == width && videoSize.height == height)
  68. {
  69. ret = YES;
  70. }
  71. return ret;
  72. }
  73. - (void)reportError:(NSError*)error category:(const char*)category
  74. {
  75. ::printf("[%s]Error: %s\n", category, [[error localizedDescription] UTF8String]);
  76. ::printf("%s\n", [[error localizedFailureReason] UTF8String]);
  77. [delegate onPlayerError: error];
  78. }
  79. - (void)reportErrorWithString:(const char*)error category:(const char*)category
  80. {
  81. ::printf("[%s]Error: %s\n", category, error);
  82. [delegate onPlayerError: nil];
  83. }
  84. - (id)init
  85. {
  86. if ((self = [super init]))
  87. {
  88. _duration = _curTime = kCMTimeZero;
  89. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  90. }
  91. return self;
  92. }
  93. - (void)cleanupCVTextureCache
  94. {
  95. if (_cmSampleBuffer)
  96. {
  97. CFRelease(_cmSampleBuffer);
  98. _cmSampleBuffer = 0;
  99. }
  100. CMVideoSampling_Uninitialize(&_videoSampling);
  101. }
  102. - (void)cleanupAssetReader
  103. {
  104. if (_reader)
  105. [_reader cancelReading];
  106. _reader = nil;
  107. _videoOut = nil;
  108. }
  109. - (void)cleanupPlayer
  110. {
  111. if (_player)
  112. {
  113. [[NSNotificationCenter defaultCenter] removeObserver: self name: AVAudioSessionRouteChangeNotification object: nil];
  114. [_player.currentItem removeObserver: self forKeyPath: @"status"];
  115. [_player removeObserver: self forKeyPath: @"currentItem"];
  116. [_player pause];
  117. _player = nil;
  118. }
  119. if (_playerItem)
  120. {
  121. [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
  122. _playerItem = nil;
  123. }
  124. }
  125. - (void)unloadPlayer
  126. {
  127. [self cleanupCVTextureCache];
  128. [self cleanupAssetReader];
  129. [self cleanupPlayer];
  130. _videoSize = CGSizeMake(0, 0);
  131. _duration = _curTime = kCMTimeZero;
  132. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  133. self->_playerReady = self->_assetReady = self->_itemReady = NO;
  134. }
  135. - (BOOL)loadVideo:(NSURL*)url
  136. {
  137. AVURLAsset* asset = [AVURLAsset URLAssetWithURL: url options: nil];
  138. if (!asset)
  139. return NO;
  140. NSArray* requestedKeys = @[@"tracks", @"playable"];
  141. [asset loadValuesAsynchronouslyForKeys: requestedKeys completionHandler:^{
  142. dispatch_async(dispatch_get_main_queue(), ^{
  143. [self prepareAsset: asset withKeys: requestedKeys];
  144. });
  145. }];
  146. return YES;
  147. }
  148. - (BOOL)_playWithPrepareBlock:(BOOL (^)())preparePlaybackBlock
  149. {
  150. if (!_playerReady)
  151. return NO;
  152. if (preparePlaybackBlock && preparePlaybackBlock() == NO)
  153. return NO;
  154. // do not do seekTo and setRate here, it seems that http streaming may hang sometimes if you do so. go figure
  155. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  156. [_player play];
  157. return YES;
  158. }
  159. - (BOOL)playToView:(VideoPlayerView*)view
  160. {
  161. return [self _playWithPrepareBlock:^() {
  162. view.player = self->_player;
  163. return YES;
  164. }];
  165. }
  166. - (BOOL)playToTexture
  167. {
  168. return [self _playWithPrepareBlock:^() {
  169. return [self prepareReader];
  170. }];
  171. }
  172. - (BOOL)playVideoPlayer
  173. {
  174. return [self _playWithPrepareBlock: nil];
  175. }
  176. - (BOOL)isPlaying { return _playerReady && _player.rate != 0.0f; }
  177. - (void)pause
  178. {
  179. if (_playerReady && _player.rate != 0.0f)
  180. [_player pause];
  181. }
  182. - (void)resume
  183. {
  184. if (_playerReady && _player.rate == 0.0f)
  185. {
  186. [self seekToTimestamp: _player.currentTime];
  187. [_player play];
  188. }
  189. }
  190. - (void)rewind { [self seekToTimestamp: kCMTimeZero]; }
  191. - (void)seekTo:(float)timeSeconds { [self seekToTimestamp: CMTimeMakeWithSeconds(timeSeconds, 1)]; }
  192. - (void)seekToTimestamp:(CMTime)time
  193. {
  194. [_player seekToTime: time toleranceBefore: kCMTimeZero toleranceAfter: kCMTimeZero];
  195. _curFrameTimestamp = _lastFrameTimestamp = time;
  196. }
  197. - (intptr_t)curFrameTexture
  198. {
  199. if (!_reader)
  200. return 0;
  201. intptr_t curTex = CMVideoSampling_LastSampledTexture(&_videoSampling);
  202. CMTime time = [_player currentTime];
  203. // if we have changed audio route and due to current category apple decided to pause playback - resume automatically
  204. if (_AudioRouteWasChanged && _player.rate == 0.0f)
  205. _player.rate = 1.0f;
  206. if (CMTimeCompare(time, _curTime) == 0 || _reader.status != AVAssetReaderStatusReading)
  207. return curTex;
  208. _curTime = time;
  209. while (_reader.status == AVAssetReaderStatusReading && CMTimeCompare(_curFrameTimestamp, _curTime) <= 0)
  210. {
  211. if (_cmSampleBuffer)
  212. CFRelease(_cmSampleBuffer);
  213. // TODO: properly handle ending
  214. _cmSampleBuffer = [_videoOut copyNextSampleBuffer];
  215. if (_cmSampleBuffer == 0)
  216. {
  217. [self cleanupCVTextureCache];
  218. return 0;
  219. }
  220. _curFrameTimestamp = CMSampleBufferGetPresentationTimeStamp(_cmSampleBuffer);
  221. }
  222. if (CMTimeCompare(_lastFrameTimestamp, _curFrameTimestamp) < 0)
  223. {
  224. _lastFrameTimestamp = _curFrameTimestamp;
  225. size_t w, h;
  226. curTex = CMVideoSampling_SampleBuffer(&_videoSampling, _cmSampleBuffer, &w, &h);
  227. _videoSize = CGSizeMake(w, h);
  228. }
  229. return curTex;
  230. }
  231. - (BOOL)setAudioVolume:(float)volume
  232. {
  233. if (!_playerReady)
  234. return NO;
  235. NSArray* audio = [_playerItem.asset tracksWithMediaType: AVMediaTypeAudio];
  236. NSMutableArray* params = [NSMutableArray array];
  237. for (AVAssetTrack* track in audio)
  238. {
  239. AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
  240. [inputParams setVolume: volume atTime: kCMTimeZero];
  241. [inputParams setTrackID: [track trackID]];
  242. [params addObject: inputParams];
  243. }
  244. AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
  245. [audioMix setInputParameters: params];
  246. [_playerItem setAudioMix: audioMix];
  247. return YES;
  248. }
  249. - (void)playerItemDidReachEnd:(NSNotification*)notification
  250. {
  251. [delegate onPlayerDidFinishPlayingVideo];
  252. }
  253. static bool _AudioRouteWasChanged = false;
  254. - (void)audioRouteChanged:(NSNotification*)notification
  255. {
  256. _AudioRouteWasChanged = true;
  257. }
  258. - (void)observeValueForKeyPath:(NSString*)path ofObject:(id)object change:(NSDictionary*)change context:(void*)context
  259. {
  260. BOOL reportPlayerReady = NO;
  261. if (context == _ObserveItemStatusContext)
  262. {
  263. AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey: NSKeyValueChangeNewKey] integerValue];
  264. switch (status)
  265. {
  266. case AVPlayerStatusUnknown:
  267. break;
  268. case AVPlayerStatusReadyToPlay:
  269. {
  270. NSArray* video = [_playerItem.asset tracksWithMediaType: AVMediaTypeVideo];
  271. if ([video count])
  272. _videoSize = [(AVAssetTrack*)[video objectAtIndex: 0] naturalSize];
  273. _duration = [_playerItem duration];
  274. _assetReady = YES;
  275. reportPlayerReady = _itemReady;
  276. }
  277. break;
  278. case AVPlayerStatusFailed:
  279. {
  280. AVPlayerItem *playerItem = (AVPlayerItem*)object;
  281. [self reportError: playerItem.error category: "prepareAsset"];
  282. }
  283. break;
  284. }
  285. }
  286. else if (context == _ObservePlayerItemContext)
  287. {
  288. if ([change objectForKey: NSKeyValueChangeNewKey] != (id)[NSNull null])
  289. {
  290. _itemReady = YES;
  291. reportPlayerReady = _assetReady;
  292. }
  293. }
  294. else
  295. {
  296. [super observeValueForKeyPath: path ofObject: object change: change context: context];
  297. }
  298. if (reportPlayerReady)
  299. {
  300. _playerReady = YES;
  301. [delegate onPlayerReady];
  302. }
  303. }
  304. - (void)prepareAsset:(AVAsset*)asset withKeys:(NSArray*)requestedKeys
  305. {
  306. // check succesful loading
  307. for (NSString* key in requestedKeys)
  308. {
  309. NSError* error = nil;
  310. AVKeyValueStatus keyStatus = [asset statusOfValueForKey: key error: &error];
  311. if (keyStatus == AVKeyValueStatusFailed)
  312. {
  313. [self reportError: error category: "prepareAsset"];
  314. return;
  315. }
  316. }
  317. if (!asset.playable)
  318. {
  319. [self reportErrorWithString: "Item cannot be played" category: "prepareAsset"];
  320. return;
  321. }
  322. if (_playerItem)
  323. {
  324. [_playerItem removeObserver: self forKeyPath: @"status"];
  325. [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
  326. _playerItem = nil;
  327. }
  328. _playerItem = [AVPlayerItem playerItemWithAsset: asset];
  329. [_playerItem addObserver: self forKeyPath: @"status"
  330. options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
  331. context: _ObserveItemStatusContext
  332. ];
  333. [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(playerItemDidReachEnd:)
  334. name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem
  335. ];
  336. if (!_player)
  337. {
  338. _player = [AVPlayer playerWithPlayerItem: _playerItem];
  339. [_player addObserver: self forKeyPath: @"currentItem"
  340. options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
  341. context: _ObservePlayerItemContext
  342. ];
  343. [_player setAllowsExternalPlayback: NO];
  344. // we want to subscribe to route change notifications, for that we need audio session active
  345. // and in case FMOD wasnt used up to this point it is still not active
  346. [[AVAudioSession sharedInstance] setActive: YES error: nil];
  347. [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(audioRouteChanged:)
  348. name: AVAudioSessionRouteChangeNotification object: nil
  349. ];
  350. }
  351. if (_player.currentItem != _playerItem)
  352. [_player replaceCurrentItemWithPlayerItem: _playerItem];
  353. else
  354. [_player seekToTime: kCMTimeZero];
  355. }
  356. - (BOOL)prepareReader
  357. {
  358. if (!_playerReady)
  359. return NO;
  360. [self cleanupAssetReader];
  361. AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
  362. if (![asset.URL isFileURL])
  363. {
  364. [self reportErrorWithString: "non-file url. no video to texture." category: "prepareReader"];
  365. return NO;
  366. }
  367. NSError* error = nil;
  368. _reader = [AVAssetReader assetReaderWithAsset: _playerItem.asset error: &error];
  369. if (error)
  370. [self reportError: error category: "prepareReader"];
  371. _reader.timeRange = CMTimeRangeMake(kCMTimeZero, _duration);
  372. AVAssetTrack* videoTrack = [[_playerItem.asset tracksWithMediaType: AVMediaTypeVideo] objectAtIndex: 0];
  373. NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
  374. _videoOut = [[AVAssetReaderTrackOutput alloc] initWithTrack: videoTrack outputSettings: options];
  375. _videoOut.alwaysCopiesSampleData = NO;
  376. if (![_reader canAddOutput: _videoOut])
  377. {
  378. [self reportErrorWithString: "canAddOutput returned false" category: "prepareReader"];
  379. return NO;
  380. }
  381. [_reader addOutput: _videoOut];
  382. if (![_reader startReading])
  383. {
  384. [self reportError: [_reader error] category: "prepareReader"];
  385. return NO;
  386. }
  387. [self cleanupCVTextureCache];
  388. CMVideoSampling_Initialize(&_videoSampling);
  389. return YES;
  390. }
  391. @end