VideoPlayer.mm 13 KB

#include "VideoPlayer.h"
#include "CVTextureCache.h"
#include "CMVideoSampling.h"
#include "GlesHelper.h"
#import <AVFoundation/AVFoundation.h>

// Distinct sentinel pointers used as KVO contexts so observeValueForKeyPath:
// can tell our two observations apart (item "status" vs player "currentItem").
static void* _ObserveItemStatusContext = (void*)0x1;
static void* _ObservePlayerItemContext = (void*)0x2;
  8. @implementation VideoPlayerView
  9. + (Class)layerClass
  10. {
  11. return [AVPlayerLayer class];
  12. }
  13. - (AVPlayer*)player
  14. {
  15. return [(AVPlayerLayer*)[self layer] player];
  16. }
  17. - (void)setPlayer:(AVPlayer*)player
  18. {
  19. [(AVPlayerLayer*)[self layer] setPlayer: player];
  20. }
  21. - (void)dealloc
  22. {
  23. self.player = nil;
  24. }
  25. @end
@implementation VideoPlayer
{
    AVPlayerItem* _playerItem;
    AVPlayer* _player;

    // video-to-texture machinery, set up lazily by prepareReader
    AVAssetReader* _reader;
    AVAssetReaderTrackOutput* _videoOut;
    CMSampleBufferRef _cmSampleBuffer;   // sample currently held for texture upload
    CMVideoSampling _videoSampling;

    CMTime _duration;
    CMTime _curTime;            // player time observed at the last curFrameTexture call
    CMTime _curFrameTimestamp;  // presentation timestamp of the sample we hold
    CMTime _lastFrameTimestamp; // timestamp of the last frame actually sampled to texture
    CGSize _videoSize;

    BOOL _playerReady;
    // we need to have both because the order of asset/item getting ready is not strict
    BOOL _assetReady;
    BOOL _itemReady;
}

@synthesize delegate;
@synthesize player = _player;

// Simple state accessors.
- (BOOL)readyToPlay { return _playerReady; }
- (CGSize)videoSize { return _videoSize; }
- (CMTime)duration { return _duration; }
- (float)durationSeconds { return CMTIME_IS_VALID(_duration) ? (float)CMTimeGetSeconds(_duration) : 0.0f; }

// Texture playback reads frames through AVAssetReader, which only works for local files.
+ (BOOL)CanPlayToTexture:(NSURL*)url { return [url isFileURL]; }
  51. + (BOOL)CheckScalingModeAspectFill:(CGSize)videoSize screenSize:(CGSize)screenSize
  52. {
  53. BOOL ret = NO;
  54. CGFloat screenAspect = (screenSize.width / screenSize.height);
  55. CGFloat videoAspect = (videoSize.width / videoSize.height);
  56. CGFloat width = ceilf(videoSize.width * videoAspect / screenAspect);
  57. CGFloat height = ceilf(videoSize.height * videoAspect / screenAspect);
  58. // Do additional input video and device resolution aspect ratio
  59. // rounding check to see if the width and height values are still
  60. // the ~same.
  61. //
  62. // If they still match, we can change the video scaling mode from
  63. // aspectFit to aspectFill, this works around some off-by-one scaling
  64. // errors with certain screen size and video resolution combos
  65. //
  66. // TODO: Shouldn't harm to extend width/height check to
  67. // match values within -1..+1 range from the original
  68. if (videoSize.width == width && videoSize.height == height)
  69. {
  70. ret = YES;
  71. }
  72. return ret;
  73. }
  74. - (void)reportError:(NSError*)error category:(const char*)category
  75. {
  76. ::printf("[%s]Error: %s\n", category, [[error localizedDescription] UTF8String]);
  77. ::printf("%s\n", [[error localizedFailureReason] UTF8String]);
  78. [delegate onPlayerError: error];
  79. }
  80. - (void)reportErrorWithString:(const char*)error category:(const char*)category
  81. {
  82. ::printf("[%s]Error: %s\n", category, error);
  83. [delegate onPlayerError: nil];
  84. }
  85. - (id)init
  86. {
  87. if ((self = [super init]))
  88. {
  89. _duration = _curTime = kCMTimeZero;
  90. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  91. }
  92. return self;
  93. }
  94. - (void)cleanupCVTextureCache
  95. {
  96. if (_cmSampleBuffer)
  97. {
  98. CFRelease(_cmSampleBuffer);
  99. _cmSampleBuffer = 0;
  100. }
  101. CMVideoSampling_Uninitialize(&_videoSampling);
  102. }
  103. - (void)cleanupAssetReader
  104. {
  105. if (_reader)
  106. [_reader cancelReading];
  107. _reader = nil;
  108. _videoOut = nil;
  109. }
// Tears down the AVPlayer / AVPlayerItem pair: unsubscribes every
// notification and KVO observation registered in prepareAsset, then drops
// the strong references. Order matters here — observers must be removed
// while the observed objects are still alive.
- (void)cleanupPlayer
{
    if (_player)
    {
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVAudioSessionRouteChangeNotification object: nil];
        [_player.currentItem removeObserver: self forKeyPath: @"status"];
        [_player removeObserver: self forKeyPath: @"currentItem"];
        [_player pause];
        _player = nil;
    }

    if (_playerItem)
    {
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
        _playerItem = nil;
    }
}
  126. - (void)unloadPlayer
  127. {
  128. [self cleanupCVTextureCache];
  129. [self cleanupAssetReader];
  130. [self cleanupPlayer];
  131. _videoSize = CGSizeMake(0, 0);
  132. _duration = _curTime = kCMTimeZero;
  133. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  134. self->_playerReady = self->_assetReady = self->_itemReady = NO;
  135. }
  136. - (BOOL)loadVideo:(NSURL*)url
  137. {
  138. AVURLAsset* asset = [AVURLAsset URLAssetWithURL: url options: nil];
  139. if (!asset)
  140. return NO;
  141. NSArray* requestedKeys = @[@"tracks", @"playable"];
  142. [asset loadValuesAsynchronouslyForKeys: requestedKeys completionHandler:^{
  143. dispatch_async(dispatch_get_main_queue(), ^{
  144. [self prepareAsset: asset withKeys: requestedKeys];
  145. });
  146. }];
  147. return YES;
  148. }
  149. - (BOOL)_playWithPrepareBlock:(BOOL (^)())preparePlaybackBlock
  150. {
  151. if (!_playerReady)
  152. return NO;
  153. if (preparePlaybackBlock && preparePlaybackBlock() == NO)
  154. return NO;
  155. // do not do seekTo and setRate here, it seems that http streaming may hang sometimes if you do so. go figure
  156. _curFrameTimestamp = _lastFrameTimestamp = kCMTimeZero;
  157. [_player play];
  158. return YES;
  159. }
  160. - (BOOL)playToView:(VideoPlayerView*)view
  161. {
  162. return [self _playWithPrepareBlock:^() {
  163. view.player = _player;
  164. return YES;
  165. }];
  166. }
  167. - (BOOL)playToTexture
  168. {
  169. return [self _playWithPrepareBlock:^() {
  170. return [self prepareReader];
  171. }];
  172. }
  173. - (BOOL)playVideoPlayer
  174. {
  175. return [self _playWithPrepareBlock: nil];
  176. }
  177. - (BOOL)isPlaying { return _playerReady && _player.rate != 0.0f; }
  178. - (void)pause
  179. {
  180. if (_playerReady && _player.rate != 0.0f)
  181. [_player pause];
  182. }
  183. - (void)resume
  184. {
  185. if (_playerReady && _player.rate == 0.0f)
  186. [_player play];
  187. }
  188. - (void)rewind { [self seekToTimestamp: kCMTimeZero]; }
  189. - (void)seekTo:(float)timeSeconds { [self seekToTimestamp: CMTimeMakeWithSeconds(timeSeconds, 1)]; }
  190. - (void)seekToTimestamp:(CMTime)time
  191. {
  192. [_player seekToTime: time];
  193. _curFrameTimestamp = _lastFrameTimestamp = time;
  194. }
// Returns the texture handle for the frame matching the player's current
// time, pulling decoded samples from the asset reader as needed.
// Returns 0 when no reader is active or the sample stream ends; otherwise
// returns the last-sampled texture even if no new frame was uploaded.
- (intptr_t)curFrameTexture
{
    if (!_reader)
        return 0;

    intptr_t curTex = CMVideoSampling_LastSampledTexture(&_videoSampling);
    CMTime time = [_player currentTime];

    // if we have changed audio route and due to current category apple decided to pause playback - resume automatically
    if (_AudioRouteWasChanged && _player.rate == 0.0f)
        _player.rate = 1.0f;

    // nothing to do: playback time has not advanced, or the reader is not reading
    if (CMTimeCompare(time, _curTime) == 0 || _reader.status != AVAssetReaderStatusReading)
        return curTex;
    _curTime = time;

    // advance through decoded samples until we hold the one whose presentation
    // timestamp is at or past the current playback time (releasing each
    // superseded buffer before copying the next)
    while (_reader.status == AVAssetReaderStatusReading && CMTimeCompare(_curFrameTimestamp, _curTime) <= 0)
    {
        if (_cmSampleBuffer)
            CFRelease(_cmSampleBuffer);

        // TODO: properly handle ending
        _cmSampleBuffer = [_videoOut copyNextSampleBuffer];
        if (_cmSampleBuffer == 0)
        {
            // stream exhausted: tear down sampling state and report no texture
            [self cleanupCVTextureCache];
            return 0;
        }

        _curFrameTimestamp = CMSampleBufferGetPresentationTimeStamp(_cmSampleBuffer);
    }

    // only (re)sample into the texture when we actually advanced to a newer frame
    if (CMTimeCompare(_lastFrameTimestamp, _curFrameTimestamp) < 0)
    {
        _lastFrameTimestamp = _curFrameTimestamp;
        size_t w, h;
        curTex = CMVideoSampling_SampleBuffer(&_videoSampling, _cmSampleBuffer, &w, &h);
        _videoSize = CGSizeMake(w, h);
    }

    return curTex;
}
  229. - (BOOL)setAudioVolume:(float)volume
  230. {
  231. if (!_playerReady)
  232. return NO;
  233. NSArray* audio = [_playerItem.asset tracksWithMediaType: AVMediaTypeAudio];
  234. NSMutableArray* params = [NSMutableArray array];
  235. for (AVAssetTrack* track in audio)
  236. {
  237. AVMutableAudioMixInputParameters* inputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
  238. [inputParams setVolume: volume atTime: kCMTimeZero];
  239. [inputParams setTrackID: [track trackID]];
  240. [params addObject: inputParams];
  241. }
  242. AVMutableAudioMix* audioMix = [AVMutableAudioMix audioMix];
  243. [audioMix setInputParameters: params];
  244. [_playerItem setAudioMix: audioMix];
  245. return YES;
  246. }
// AVPlayerItemDidPlayToEndTimeNotification handler: forward to the delegate.
- (void)playerItemDidReachEnd:(NSNotification*)notification
{
    [delegate onPlayerDidFinishPlayingVideo];
}

// File-static flag set on audio route change; curFrameTexture reads it to
// auto-resume playback when the system paused us.
// NOTE(review): the flag is never reset to false, so after one route change
// playback will keep being forced to rate 1 whenever it is found paused —
// confirm this is intended.
static bool _AudioRouteWasChanged = false;
- (void)audioRouteChanged:(NSNotification*)notification
{
    _AudioRouteWasChanged = true;
}
// KVO callback for the two observations registered in prepareAsset:
//   _ObserveItemStatusContext -> AVPlayerItem "status"
//   _ObservePlayerItemContext -> AVPlayer "currentItem"
// The delegate is told the player is ready only once BOTH have fired
// (they can arrive in either order, hence the _assetReady/_itemReady pair).
- (void)observeValueForKeyPath:(NSString*)path ofObject:(id)object change:(NSDictionary*)change context:(void*)context
{
    BOOL reportPlayerReady = NO;

    if (context == _ObserveItemStatusContext)
    {
        AVPlayerStatus status = (AVPlayerStatus)[[change objectForKey: NSKeyValueChangeNewKey] integerValue];
        switch (status)
        {
            case AVPlayerStatusUnknown:
                break;

            case AVPlayerStatusReadyToPlay:
            {
                // track info is loaded now: cache the video size and duration
                NSArray* video = [_playerItem.asset tracksWithMediaType: AVMediaTypeVideo];
                if ([video count])
                    _videoSize = [(AVAssetTrack*)[video objectAtIndex: 0] naturalSize];
                _duration = [_playerItem duration];
                _assetReady = YES;
                reportPlayerReady = _itemReady;
            }
            break;

            case AVPlayerStatusFailed:
            {
                AVPlayerItem *playerItem = (AVPlayerItem*)object;
                [self reportError: playerItem.error category: "prepareAsset"];
            }
            break;
        }
    }
    else if (context == _ObservePlayerItemContext)
    {
        // currentItem became non-null: the player accepted our item
        if ([change objectForKey: NSKeyValueChangeNewKey] != (id)[NSNull null])
        {
            _itemReady = YES;
            reportPlayerReady = _assetReady;
        }
    }
    else
    {
        // not one of our contexts: forward to super per KVO convention
        [super observeValueForKeyPath: path ofObject: object change: change context: context];
    }

    if (reportPlayerReady)
    {
        _playerReady = YES;
        [delegate onPlayerReady];
    }
}
// Completion of the async key loading started in loadVideo:. Validates the
// requested keys, (re)creates the AVPlayerItem, registers the KVO and
// notification observers, and attaches the item to the lazily-created player.
- (void)prepareAsset:(AVAsset*)asset withKeys:(NSArray*)requestedKeys
{
    // check succesful loading of every requested key
    for (NSString* key in requestedKeys)
    {
        NSError* error = nil;
        AVKeyValueStatus keyStatus = [asset statusOfValueForKey: key error: &error];
        if (keyStatus == AVKeyValueStatusFailed)
        {
            [self reportError: error category: "prepareAsset"];
            return;
        }
    }

    if (!asset.playable)
    {
        [self reportErrorWithString: "Item cannot be played" category: "prepareAsset"];
        return;
    }

    // replacing an existing item: detach its observers before dropping it
    if (_playerItem)
    {
        [_playerItem removeObserver: self forKeyPath: @"status"];
        [[NSNotificationCenter defaultCenter] removeObserver: self name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem];
        _playerItem = nil;
    }

    _playerItem = [AVPlayerItem playerItemWithAsset: asset];
    [_playerItem addObserver: self forKeyPath: @"status"
        options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
        context: _ObserveItemStatusContext
    ];
    [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(playerItemDidReachEnd:)
        name: AVPlayerItemDidPlayToEndTimeNotification object: _playerItem
    ];

    // the AVPlayer itself is created once and reused across videos
    if (!_player)
    {
        _player = [AVPlayer playerWithPlayerItem: _playerItem];
        [_player addObserver: self forKeyPath: @"currentItem"
            options: NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
            context: _ObservePlayerItemContext
        ];
        [_player setAllowsExternalPlayback: NO];

        // we want to subscribe to route change notifications, for that we need audio session active
        // and in case FMOD wasnt used up to this point it is still not active
        [[AVAudioSession sharedInstance] setActive: YES error: nil];
        [[NSNotificationCenter defaultCenter] addObserver: self selector: @selector(audioRouteChanged:)
            name: AVAudioSessionRouteChangeNotification object: nil
        ];
    }

    if (_player.currentItem != _playerItem)
        [_player replaceCurrentItemWithPlayerItem: _playerItem];
    else
        [_player seekToTime: kCMTimeZero];
}
  354. - (BOOL)prepareReader
  355. {
  356. if (!_playerReady)
  357. return NO;
  358. [self cleanupAssetReader];
  359. AVURLAsset* asset = (AVURLAsset*)_playerItem.asset;
  360. if (![asset.URL isFileURL])
  361. {
  362. [self reportErrorWithString: "non-file url. no video to texture." category: "prepareReader"];
  363. return NO;
  364. }
  365. NSError* error = nil;
  366. _reader = [AVAssetReader assetReaderWithAsset: _playerItem.asset error: &error];
  367. if (error)
  368. [self reportError: error category: "prepareReader"];
  369. _reader.timeRange = CMTimeRangeMake(kCMTimeZero, _duration);
  370. AVAssetTrack* videoTrack = [[_playerItem.asset tracksWithMediaType: AVMediaTypeVideo] objectAtIndex: 0];
  371. NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
  372. _videoOut = [[AVAssetReaderTrackOutput alloc] initWithTrack: videoTrack outputSettings: options];
  373. _videoOut.alwaysCopiesSampleData = NO;
  374. if (![_reader canAddOutput: _videoOut])
  375. {
  376. [self reportErrorWithString: "canAddOutput returned false" category: "prepareReader"];
  377. return NO;
  378. }
  379. [_reader addOutput: _videoOut];
  380. if (![_reader startReading])
  381. {
  382. [self reportError: [_reader error] category: "prepareReader"];
  383. return NO;
  384. }
  385. [self cleanupCVTextureCache];
  386. CMVideoSampling_Initialize(&_videoSampling);
  387. return YES;
  388. }
  389. @end